Splitting Nginx logs with Python and loading them into MySQL
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Splits nginx access logs and loads them into MySQL.

nginx log format:
log_format access '$remote_addr - $remote_user [$time_local] "$request" '
                  '$status $body_bytes_sent "$http_referer" '
                  '"$http_user_agent" $http_x_forwarded_for';

Sample insert:
INSERT INTO `nginxlog` (`status`, `remote_user`, `http_referer`, `remote_addr`,
    `http_x_forwarded_for`, `hostname`, `request`, `request_type`,
    `http_user_agent`, `time_local`)
VALUES ('2', '2', '2', '2', '2', '2', '2', '2', '2', '2');

table sql:
CREATE TABLE `nginxlog` (
    `status` int(4) DEFAULT NULL,
    `remote_user` varchar(20) DEFAULT NULL,
    `http_referer` text,
    `remote_addr` varchar(20) DEFAULT NULL,
    `http_x_forwarded_for` varchar(20) DEFAULT NULL,
    `hostname` varchar(50) DEFAULT NULL,
    `request` varchar(200) DEFAULT NULL,
    `request_type` varchar(10) DEFAULT NULL,
    `http_user_agent` varchar(200) DEFAULT NULL,
    `time_local` int(10) unsigned DEFAULT NULL,
    `server_id` int(5) unsigned DEFAULT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8
"""
#-------------------------------- config --------------------------------
mysql = {}
mysql['mhost'] = 'localhost'
mysql['muser'] = 'root'
mysql['mpwd'] = ''
mysql['mport'] = '3306'
mysql['mdbname'] = 'nginxlog'

# working directory of the application
ROOT = '/home/nginxweblog/app/'
# FTP directory that holds the raw production logs
Sourcedatadir = '/home/nginxweblog/sourcedata/'
# run log
Logfile = ROOT + 'nginxlog.log'

#-------------------------------- import --------------------------------
import re, time, os
import MySQLdb

#-------------------------------- Sublog --------------------------------
# Parses a single line of nginx log data.
class Sublog:
    def __init__(self, logline=None):
        if logline:
            self.logline = logline.strip()
        self.logdate = {}
        #self.run()

    def run(self):
        if self.logline:
            self.splitlog()
            self.getlog()

    def splitlog(self):
        # split the log line on spaces
        self.splitlogd = re.split(' ', self.logline)
        #print self.splitlogd
        return self.splitlogd

    def getlog(self):
        if len(self.splitlogd) >= 20:
            self.logdate['remote_addr'] = self.splitlogd[0]
            self.logdate['remote_user'] = self.splitlogd[2]
            self.logdate['time_local'] = time.mktime(time.strptime(self.splitlogd[3], '[%d/%b/%Y:%H:%M:%S'))
            self.logdate['request_type'] = self.splitlogd[5].replace('"', '')
            self.logdate['request'] = self.splitlogd[6]
            self.logdate['status'] = self.splitlogd[8]
            self.logdate['http_referer'] = MySQLdb.escape_string(self.splitlogd[10].replace('"', ''))
            self.logdate['http_x_forwarded_for'] = self.splitlogd[len(self.splitlogd) - 1]
            self.get_http_user_agent()
            self.get_hostname()

    def get_http_user_agent(self):
        # the user agent is the third double-quoted field on the line
        agent = re.compile(r'"(.*?)"')
        self.logdate['http_user_agent'] = MySQLdb.escape_string(agent.findall(self.logline)[2])

    def get_hostname(self):
        # take the hostname from the referer field
        hostname_r = re.compile(r'http://(.*?)/')
        ishostname = hostname_r.findall(self.splitlogd[10])
        #print ishostname
        if len(ishostname) >= 1:
            self.logdate['hostname'] = ishostname[0]
        else:
            self.logdate['hostname'] = '-'

#-------------------------------- insert_log --------------------------------
# Generates '|||'-delimited CSV data, mainly for fast import into MySQL.
class insert_log(Sublog):
    def __init__(self, logpath, serverid):
        Sublog.__init__(self)
        self.nowtime = time.strftime("%Y_%m_%d", time.localtime(time.time()))
        self.sunlogfile = "nginxlog_" + self.nowtime + '.csv'
        self.logpath = logpath
        self.serverid = serverid
        self.numbres = 0
        #self.Mysql_db_instance = Mysql_db()

    def createsubfile(self):
        self.opencsvfile()
        self.subfile = open(ROOT + 'CSV/' + self.sunlogfile, 'a')
        olog = open(self.logpath)
        for i in olog:
            self.logdate = {}      # reset so an unparsable line is not written with stale data
            self.logline = i.strip()
            self.run()
            if self.logdate:
                #print self.logdate
                self.createsql()
                #self.insertdb()
                #time.sleep(3)
                self.numbres = self.numbres + 1
        self.subfile.close()
        return [ROOT + 'CSV/' + self.sunlogfile, self.sunlogfile, self.numbres]

    def insertdb(self):
        sql = "INSERT INTO `nginxlog` (`status`, `remote_user`, `http_referer`, `remote_addr`, `http_x_forwarded_for`, `hostname`, `request`, `request_type`, `http_user_agent`, `time_local`) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s');" % (self.logdate['status'], self.logdate['remote_user'], self.logdate['http_referer'], self.logdate['remote_addr'], self.logdate['http_x_forwarded_for'], self.logdate['hostname'], self.logdate['request'], self.logdate['request_type'], self.logdate['http_user_agent'], self.logdate['time_local'])
        #print sql
        self.subfile.write(sql + '\n')
        #self.Mysql_db_instance.insertsql(sql)
        #self.Mysql_db_instance.commit()

    def createsql(self):
        c = "%s|||%s|||%s|||%s|||%s|||%s|||%s|||%s|||%s|||%s|||%s" % (self.logdate['status'], self.logdate['remote_user'], self.logdate['http_referer'], self.logdate['remote_addr'], self.logdate['http_x_forwarded_for'], self.logdate['hostname'], self.logdate['request'], self.logdate['request_type'], self.logdate['http_user_agent'], self.logdate['time_local'], self.serverid)
        self.subfile.write(c + '\n')

    def opencsvfile(self):
        if not os.path.isdir(ROOT + 'CSV'):
            os.mkdir(ROOT + 'CSV')

#-------------------------------- functions --------------------------------
def load_mysql(csvfile):
    # bulk-load the generated CSV file and delete it afterwards
    if mysql['mpwd']:
        os.system("""mysql -h%s -u%s -p%s -e "LOAD DATA INFILE '%s' INTO TABLE nginxlog.nginxlog FIELDS TERMINATED BY '|||';" && rm -rf %s """ % (mysql['mhost'], mysql['muser'], mysql['mpwd'], csvfile, csvfile))
    else:
        os.system("""mysql -h%s -u%s -e "LOAD DATA INFILE '%s' INTO TABLE nginxlog.nginxlog FIELDS TERMINATED BY '|||';" && rm -rf %s """ % (mysql['mhost'], mysql['muser'], csvfile, csvfile))

# index handling: type = 0 drops the indexes, type = 1 creates them
def mysqlindex(type):
    if mysql['mpwd']:
        ism = "mysql -h%s -u%s -p%s -e " % (mysql['mhost'], mysql['muser'], mysql['mpwd'])
    else:
        ism = "mysql -h%s -u%s -e " % (mysql['mhost'], mysql['muser'])
    if type == 0:
        os.system(" %s 'alter table nginxlog.nginxlog drop index time_local' " % ism)
        os.system(" %s 'alter table nginxlog.nginxlog drop index hostname' " % ism)
        os.system(" %s 'alter table nginxlog.nginxlog drop index remote_addr' " % ism)
    elif type == 1:
        os.system(" %s 'alter table nginxlog.nginxlog add index time_local(time_local)' " % ism)
        os.system(" %s 'alter table nginxlog.nginxlog add index hostname(hostname)' " % ism)
        os.system(" %s 'alter table nginxlog.nginxlog add index remote_addr(remote_addr)' " % ism)

# logging helper
def write_logs(logconten):
    logfile_path = Logfile
    if logfile_path and logconten:
        log_write = open(logfile_path, 'a')
        log_write.write(logconten + '\n')
        log_write.close()

#-------------------------------- run --------------------------------
def run():
    filelist = os.listdir(Sourcedatadir)
    print 'start.......'
    write_logs('-----------%s start.....----%s-----------------' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())), str(filelist)))
    for line in filelist:
        if os.path.isfile(Sourcedatadir + line):
            # file names are expected to look like <serverid>_xxx
            s = re.split('_', line)
            if re.match(r'^\d.*$', s[0]):
                write_logs('time(%s) logfile(%s) start ....' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())), line))
                insert_log_instance = insert_log(Sourcedatadir + line, s[0])
                gfiledata = insert_log_instance.createsubfile()
                load_mysql(gfiledata[0])
                os.remove(Sourcedatadir + line)
                write_logs('time(%s) logfile(%s) log numbers(%s) stop ....' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())), line, str(gfiledata[2])))
                print Sourcedatadir + line + ' OK ....... '

#-------------------------------- exec --------------------------------
if __name__ == "__main__":
    #run_inotify(Sourcedatadir, run)
    run()
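To make the parsing step concrete, here is a minimal usage sketch of the Sublog class on a single access-log line. The IP, URL and user agent are invented for illustration; the line just has to follow the log_format from the docstring and split into at least 20 space-separated fields, which getlog() requires.

# Minimal sketch: parse one made-up access-log line with Sublog.
# Assumes the classes above are importable (or that this snippet is
# appended to the script) and that MySQLdb is installed, since Sublog
# uses MySQLdb.escape_string().
sample = ('203.0.113.10 - - [10/Oct/2016:13:55:36 +0800] '
          '"GET /index.html HTTP/1.1" 200 612 "http://example.com/page" '
          '"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 '
          '(KHTML, like Gecko) Chrome/50.0 Safari/537.36" -')

s = Sublog(sample)
s.run()                            # split on spaces, then fill s.logdate
print s.logdate['remote_addr']     # 203.0.113.10
print s.logdate['hostname']        # example.com (taken from the referer)
print s.logdate['status']          # 200
print s.logdate['http_user_agent'] # the third double-quoted field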

In database optimization, the index strategy should follow the query pattern: 1. when queries involve several columns and the order of the conditions is fixed, use a composite index; 2. when queries involve several columns but the conditions appear in varying combinations, use several single-column indexes. Composite indexes are suited to optimizing multi-column queries, while single-column indexes suit single-column queries.
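As a rough illustration against the nginxlog table created by the script above (the index names are made up, the connection values are placeholders, and whether these particular indexes pay off depends on your real queries):

# Sketch: composite vs. single-column indexes on the nginxlog table.
import MySQLdb

conn = MySQLdb.connect(host='localhost', user='root', passwd='', db='nginxlog')
cur = conn.cursor()

# Composite index: queries always filter on hostname first and then time_local
# (fixed condition order, e.g. WHERE hostname = ... AND time_local > ...).
cur.execute("ALTER TABLE nginxlog ADD INDEX idx_host_time (hostname, time_local)")

# Single-column indexes: the columns are filtered independently or in
# varying combinations.
cur.execute("ALTER TABLE nginxlog ADD INDEX idx_status (status)")
cur.execute("ALTER TABLE nginxlog ADD INDEX idx_addr (remote_addr)")

conn.close()

Note that the script's own mysqlindex() helper drops and re-creates indexes named time_local, hostname and remote_addr around bulk loads, so any extra indexes should be coordinated with it.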

To optimize MySQL slow queries, use the slow query log together with performance_schema: 1. enable the slow query log and set a time threshold so that slow queries are recorded; 2. use performance_schema to analyze the execution details of those queries, find the bottlenecks and optimize them.
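A minimal sketch of both steps, assuming MySQL 5.6 or later (for performance_schema's events_statements_summary_by_digest table) and a user with the required privileges; connection values are placeholders.

# Sketch: turn on the slow query log and inspect statement digests.
import MySQLdb

conn = MySQLdb.connect(host='localhost', user='root', passwd='')
cur = conn.cursor()

# 1. Record every statement slower than 1 second.
cur.execute("SET GLOBAL slow_query_log = 'ON'")
cur.execute("SET GLOBAL long_query_time = 1")

# 2. Look at the most expensive statement patterns seen so far.
cur.execute("""
    SELECT DIGEST_TEXT, COUNT_STAR, SUM_TIMER_WAIT/1e12 AS total_seconds
    FROM performance_schema.events_statements_summary_by_digest
    ORDER BY SUM_TIMER_WAIT DESC
    LIMIT 5
""")
for digest_text, count_star, total_seconds in cur.fetchall():
    print digest_text, count_star, total_seconds

conn.close()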

MySQL and SQL are essential skills for developers. 1. MySQL is an open-source relational database management system, and SQL is the standard language for managing and operating databases. 2. MySQL supports multiple storage engines with efficient data storage and retrieval, while SQL performs complex data operations with simple statements. 3. Usage examples range from basic to advanced queries, such as filtering and sorting by condition. 4. Common mistakes include syntax errors and performance problems, which can be addressed by checking the SQL statement and using the EXPLAIN command. 5. Performance tips include using indexes, avoiding full table scans, optimizing JOIN operations, and keeping the code readable.
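For example, a basic filtered and sorted query against the script's nginxlog table, followed by an EXPLAIN of the same statement to see whether an index is used (connection values are placeholders):

# Sketch: filter, sort and EXPLAIN a query on the nginxlog table.
import MySQLdb

conn = MySQLdb.connect(host='localhost', user='root', passwd='', db='nginxlog')
cur = conn.cursor()

# Filter and sort: non-200 requests, newest first.
cur.execute("SELECT remote_addr, request, status FROM nginxlog "
            "WHERE status <> 200 ORDER BY time_local DESC LIMIT 10")
for row in cur.fetchall():
    print row

# EXPLAIN shows whether the query can use an index or scans the whole table.
cur.execute("EXPLAIN SELECT remote_addr, request, status FROM nginxlog "
            "WHERE status <> 200 ORDER BY time_local DESC LIMIT 10")
for row in cur.fetchall():
    print row

conn.close()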

MySQL asynchronous master-slave replication synchronizes data through the binlog, improving read performance and availability: 1) the master records every change in its binlog; 2) the slave's I/O thread reads the binlog from the master; 3) the slave's SQL thread applies the binlog events to bring its data in sync.
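A minimal sketch of that sequence using the classic binlog-position syntax (pre-MySQL 8.0). The hostnames, credentials and binlog coordinates are placeholders, and the master is assumed to already have log-bin and a unique server-id set in my.cnf.

# Sketch: minimal asynchronous master-slave setup.
import MySQLdb

# 1) The master records every change in its binlog; create an account the
#    slave can use to read it.
master = MySQLdb.connect(host='master.example.com', user='root', passwd='secret')
master.cursor().execute(
    "GRANT REPLICATION SLAVE ON *.* TO 'repl'@'%' IDENTIFIED BY 'repl_pwd'")

# 2) The slave's I/O thread pulls the binlog from the given file/position,
# 3) and its SQL thread replays the events locally.
slave = MySQLdb.connect(host='slave.example.com', user='root', passwd='secret')
scur = slave.cursor()
scur.execute("CHANGE MASTER TO MASTER_HOST='master.example.com', "
             "MASTER_USER='repl', MASTER_PASSWORD='repl_pwd', "
             "MASTER_LOG_FILE='mysql-bin.000001', MASTER_LOG_POS=4")
scur.execute("START SLAVE")
scur.execute("SHOW SLAVE STATUS")
print scur.fetchone()    # check Slave_IO_Running / Slave_SQL_Running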

MySQL is an open-source relational database management system. 1) Create databases and tables with the CREATE DATABASE and CREATE TABLE commands. 2) Basic operations: INSERT, UPDATE, DELETE and SELECT. 3) Advanced operations: JOIN, subqueries and transactions. 4) Debugging tips: check syntax, data types and privileges. 5) Optimization advice: use indexes, avoid SELECT *, and use transactions.
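A small sketch of those operations; the demo database and servers table are made up for illustration, and the transaction only applies to InnoDB tables (the script's nginxlog table is MyISAM). Connection values are placeholders.

# Sketch: create a database and an InnoDB table, then run basic CRUD and a JOIN.
import MySQLdb

conn = MySQLdb.connect(host='localhost', user='root', passwd='')
cur = conn.cursor()
cur.execute("CREATE DATABASE IF NOT EXISTS demo")
cur.execute("""CREATE TABLE IF NOT EXISTS demo.servers (
                   id INT UNSIGNED PRIMARY KEY,
                   name VARCHAR(50)
               ) ENGINE=InnoDB""")

# CRUD (MySQLdb substitutes the %s placeholders safely)
cur.execute("INSERT INTO demo.servers (id, name) VALUES (%s, %s)", (1, 'web-1'))
cur.execute("UPDATE demo.servers SET name = %s WHERE id = %s", ('web-01', 1))

# JOIN: per-server request counts against the nginxlog table the script fills
cur.execute("SELECT s.name, COUNT(*) FROM demo.servers s "
            "JOIN nginxlog.nginxlog n ON n.server_id = s.id GROUP BY s.name")
print cur.fetchall()

cur.execute("DELETE FROM demo.servers WHERE id = %s", (1,))
conn.commit()    # transactions only apply to InnoDB tables; MyISAM ignores them
conn.close()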

Installing and using MySQL involves: 1. downloading and installing MySQL and setting the root password; 2. creating databases and tables with SQL commands such as CREATE DATABASE and CREATE TABLE; 3. performing CRUD operations with INSERT, SELECT, UPDATE and DELETE; 4. creating indexes and stored procedures to optimize performance and implement more complex logic. With these steps you can build and manage a MySQL database from scratch.
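As a sketch of the stored-procedure step, in the same os.system()/mysql-client style the script itself uses; the procedure name count_by_status is made up, and the empty root password matches the script's config (add -p<password> otherwise).

# Sketch: create and call a stored procedure with the mysql client.
# The body is a single SELECT, so no DELIMITER change is needed.
import os

os.system("""mysql -hlocalhost -uroot nginxlog -e "
    DROP PROCEDURE IF EXISTS count_by_status;
    CREATE PROCEDURE count_by_status(IN st INT)
        SELECT COUNT(*) AS hits FROM nginxlog WHERE status = st;
    CALL count_by_status(404);
" """)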

The InnoDB buffer pool improves MySQL performance by holding data and index pages in memory: 1) data pages are loaded into the buffer pool, reducing disk I/O; 2) dirty pages are marked and periodically flushed back to disk; 3) an LRU algorithm decides which pages get evicted; 4) a read-ahead mechanism preloads pages that are likely to be needed soon.
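A quick way to inspect this on a running server is to read innodb_buffer_pool_size and estimate the hit rate from the buffer-pool counters. This is only relevant for InnoDB tables (the script's own table is MyISAM); connection values are placeholders.

# Sketch: buffer pool size and hit rate from Innodb_buffer_pool_read* counters.
import MySQLdb

conn = MySQLdb.connect(host='localhost', user='root', passwd='')
cur = conn.cursor()

cur.execute("SHOW VARIABLES LIKE 'innodb_buffer_pool_size'")
print cur.fetchone()     # memory (bytes) reserved for cached data/index pages

cur.execute("SHOW GLOBAL STATUS LIKE 'Innodb_buffer_pool_read%'")
status = dict(cur.fetchall())
disk_reads    = float(status['Innodb_buffer_pool_reads'])           # missed the pool
read_requests = float(status['Innodb_buffer_pool_read_requests'])   # all logical reads
if read_requests:
    print 'buffer pool hit rate: %.4f' % (1 - disk_reads / read_requests)

conn.close()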

MySQL suits beginners because it is easy to install, powerful, and makes data easy to manage. 1. Installation and configuration are simple, and it runs on many operating systems. 2. It supports basic operations such as creating databases and tables, and inserting, querying, updating and deleting data. 3. It offers advanced features such as JOIN operations and subqueries. 4. Performance can be improved with indexes, query optimization, and table partitioning or sharding. 5. It supports backup, recovery and security measures to keep data safe and consistent.
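For the backup point, a sketch in the script's own os.system() style using mysqldump; the backup directory is a placeholder and must exist, and -p<password> should be added if root has one.

# Sketch: back up and restore the nginxlog database with mysqldump / mysql.
import os, time

backupfile = '/home/nginxweblog/backup/nginxlog_%s.sql' % time.strftime('%Y_%m_%d')
os.system("mysqldump -hlocalhost -uroot nginxlog > %s" % backupfile)

# Restore, e.g. on another server:
# os.system("mysql -hlocalhost -uroot nginxlog < %s" % backupfile)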

