import pandas as pd
# read a comma-separated file
df = pd.read_csv("./demo.csv")
# read_table with an explicit separator does the same thing
df = pd.read_table("./demo.csv", sep=',')
# treat the first line as data rather than as a header
pd.read_csv("./demo.csv", header=None)
# supply the column names explicitly
pd.read_csv("./demo.csv", names=['a', 'b', 'c', 'd', 'message'])
names = ['a', 'b', 'c', 'd', "message"]
# use the message column as the row index via index_col
pd.read_csv("./demo.csv",names=names,index_col="message")
# skip rows 0, 2 and 3 of the file
df = pd.read_csv("./demo.txt",skiprows=[0,2,3])
# drop the rows whose message column is NaN (keep only non-missing messages)
result = pd.read_csv("./demo.csv")
result[result.message.notnull()]
# read only the first 5 rows
df = pd.read_csv("./demo.csv", nrows=5)
# read the file lazily in chunks of 100 rows
chunker = pd.read_csv('./demo.csv', chunksize=100)
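The chunker above is an iterator of DataFrames rather than a single frame; a minimal sketch of consuming it, only counting rows so that no assumption about the columns is needed:
total_rows = 0
for chunk in chunker:        # each chunk is a DataFrame of up to 100 rows
    total_rows += len(chunk)
print(total_rows)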
data.to_csv("outer.csv")
|
作为分隔符data.to_csv(sys.stdout,sep="|")
#指定NULL做替换
data.to_csv(sys.stdout,na_rep="NULL")
data.to_csv(sys.stdout,index=False,header=False)
data.to_csv(sys.stdout,index=False,columns=['a','b','c'])
# peek at the raw lines of the file
list(open("./demo.txt"))
# fields separated by a variable amount of whitespace
df = pd.read_table("./demo.txt", sep=r'\s+')
import csv
fp = open("demo.csv")
read = csv.reader(fp)
for line in read:
    print(line)
fp.close()
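The csv module also ships a DictReader that maps every row onto the header fields; a short sketch of the same file read that way:
with open("demo.csv") as fp:
    for row in csv.DictReader(fp):   # each row is a dict keyed by the header line
        print(row)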
import json
# obj can be any JSON-serializable object; ensure_ascii=False keeps non-ASCII characters unescaped
obj = {"name": "cui", "age": 10}
res = json.dumps(obj, ensure_ascii=False)
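For the reverse direction, json.loads parses a JSON string back into Python objects, and a list of records converts straight into a DataFrame; a short sketch with a made-up string:
text = '[{"name": "Tom", "age": 9}, {"name": "Jerry", "age": 10}]'
records = json.loads(text)    # JSON array -> list of dicts
pd.DataFrame(records)         # list of dicts -> DataFrame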
# lxml's objectify module parses XML into objects whose child tags become attributes
from lxml import objectify
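The original notes stop at the import, so the following is only a minimal sketch of how objectify is typically used, with a made-up XML snippet:
xml = b"<trade><ticker>AAPL</ticker><price>189.5</price></trade>"
root = objectify.fromstring(xml)
print(root.ticker)         # child elements are reached as attributes
print(root.price.pyval)    # .pyval converts the text to a native Python value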
# 38 consecutive daily dates starting from 2000-01-01
import pandas as pd
import numpy as np
from pandas import Series,DataFrame
dates = pd.date_range("1/1/2000",periods=38)
ts = Series(np.arange(38),index=dates)
ts
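Because the index is a DatetimeIndex, such a series can be sliced directly with date strings; a short illustrative sketch:
ts["2000-01-05":"2000-01-10"]   # rows between the two dates, endpoints included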
import sqlite3
query = """
CREATE TABLE test(a VARCHAR(20),b VARCHAR(20),c REAL,d INTEGER);
"""
con = sqlite3.connect(":memory:")
con.execute(query)
con.commit()
data = [("Atlanta","Georgia",1.25,6),("Tallahassee","Florida",2.6,3),("Sacramento","California",1.7,5)]
stmt = "INSERT INTO test VALUES(?,?,?,?)"
con.executemany(stmt,data)
con.commit()
cursor = con.execute("select * from test")
rows = cursor.fetchall()
# cursor.description holds the result-set metadata; the first field of each entry is the column name
DataFrame(rows, columns=list(zip(*cursor.description))[0])
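pandas can also run the query and build the frame in one step; a minimal sketch using pandas.read_sql against the same in-memory connection:
df_sql = pd.read_sql("select * from test", con)   # column names are taken from the cursor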
#coding=utf-8
import pymysql
conn = pymysql.connect(host='localhost',port=3306,user="root",passwd="123",db="day39")
cur = conn.cursor()
# query all rows of the e1 table
cur.execute("select * from e1")
res = cur.fetchall()
res
# create a table
cur.execute("create table stud(id int,name varchar(20),class varchar(30),age varchar(10))")
# insert one row
cur.execute("insert into stud values(1,'Tom','3year2class','9')")
# update a row
cur.execute("update stud set age='10' where name='Tom'")
# delete rows
cur.execute("delete from stud where age='9'")
conn.commit()
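As a side note, pymysql also accepts parameterized statements via %s placeholders, which keeps the values out of the SQL string; a minimal sketch with made-up row values, run while the connection is still open:
# the driver fills in the %s placeholders safely for every column type
cur.execute("insert into stud values(%s, %s, %s, %s)", (2, 'Jerry', '3year1class', '8'))
conn.commit()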
cur.close()
conn.close()
# coding: utf-8
import memcache

class MemcachedClient():
    '''Example of basic operations with the python-memcached client.'''
    def __init__(self, hostList):
        self.__mc = memcache.Client(hostList)

    def set(self, key, value):
        # store value under key
        result = self.__mc.set(key, value)
        return result

    def get(self, key):
        # fetch the value stored under key
        name = self.__mc.get(key)
        return name

    def delete(self, key):
        # remove key from the cache
        result = self.__mc.delete(key)
        return result

if __name__ == '__main__':
    mc = MemcachedClient(["127.0.0.1:11511", "127.0.0.1:11512"])
    key = "name"
    result = mc.set(key, "NieYong")
    print("set result:", result)
    name = mc.get(key)
    print("get result:", name)
    result = mc.delete(key)
    print("delete result:", result)
# coding: utf-8
import pymongo
# Connection was removed in pymongo 3.x; MongoClient is the current entry point
connection = pymongo.MongoClient('10.32.38.50', 27017)
# select the myblog database
db = connection.myblog
# use the users collection
collection = db.users
# insert a single document
user = {"name": "cui", "age": "10"}
collection.insert_one(user)
# insert several documents at once
users = [{"name": "cui", "age": "9"}, {"name": "cui", "age": "11"}]
collection.insert_many(users)
# fetch a single document
print(collection.find_one())
# fetch all documents
for data in collection.find():
    print(data)
# count the documents in the collection
print(collection.count_documents({}))
# simple filtered query
for data in collection.find({"name": "cui"}):
    print(data)
# find_one with a filter returns one matching document
print(collection.find_one({"name": "cui"}))
# advanced query: $gt comparison plus sorting (age is stored as a string, so this compares strings)
print("__________________________________________")
print('collection.find({"age": {"$gt": "10"}})')
print("__________________________________________")
for data in collection.find({"age": {"$gt": "10"}}).sort("age"):
    print(data)
# list all collections in the database
print(db.list_collection_names())
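The same collection API covers updates and deletes as well; a minimal sketch reusing the filter above:
# set age to "12" on the first document whose name is "cui"
collection.update_one({"name": "cui"}, {"$set": {"age": "12"}})
# remove every document whose name is "cui"
collection.delete_many({"name": "cui"})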
import requests
url = "https://api.github.com/repositories/858127/milestones/28/labels"
res = requests.get(url)
# the endpoint returns a JSON array of label objects, which maps directly onto a DataFrame
df = DataFrame(res.json())
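Assuming the label objects carry the usual name and color fields of the GitHub REST API, the normal column selection applies; a short sketch:
df[["name", "color"]]   # keep only the label name and its color code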