This page collects typical usage examples of the Python method pysmartac.log.PLOG.warn. If you are wondering what exactly PLOG.warn does, how to call it, or simply want to see it used in real code, the hand-picked examples below should help. You can also explore further usage examples of the class it belongs to, pysmartac.log.PLOG.
The following shows 7 code examples of the PLOG.warn method, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
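Before the examples, a minimal sketch of the call pattern they all share may be useful. It assumes only what the examples themselves show: PLOG is a module-level logger object whose warn (and sibling trace/debug/info/error) methods each take a single pre-formatted message string. The ftp_host value below is purely illustrative and not taken from the source.

from pysmartac.log import PLOG

ftp_host = "192.0.2.10"   # hypothetical value, for illustration only
PLOG.debug('trying ftp server "%s"' % ftp_host)
PLOG.warn('Can not connect to ftp server "%s"' % ftp_host)   # same pattern as Example 2 below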
Example 1: addJsonInfo
# Required import: from pysmartac.log import PLOG [as alias]
# Or: from pysmartac.log.PLOG import warn [as alias]
def addJsonInfo(jsonSourcefile, destJson):
    filedir = os.path.dirname(jsonSourcefile)
    parentDirName = os.path.split(filedir)[-1]
    primaryFilename = ""
    jsSourceFileInfo = None
    with open(jsonSourcefile, "r") as f:
        jsSourceFileInfo = json.load(f, 'utf8')
    if jsSourceFileInfo != None and isinstance(jsSourceFileInfo, dict):
        if jsSourceFileInfo.has_key("file"):
            primaryFilename = jsSourceFileInfo["file"]
            if primaryFilename != "":
                jsSourceFileInfo["id"] = str(uuid.uuid1())
                if primaryFilename.startswith("https:"):
                    # ios info file
                    filetimestamp = time.localtime(os.path.getmtime(jsonSourcefile))
                    primaryFileTime = time.strftime('%Y-%m-%d %H:%M:%S', filetimestamp)
                    jsSourceFileInfo["filetime"] = primaryFileTime
                    if not jsSourceFileInfo.has_key("filesize"):
                        jsSourceFileInfo["filesize"] = "0"
                    #destJson["list"].append(jsSourceFileInfo)
                else:
                    try:
                        primaryFileSize = os.path.getsize(os.path.join(filedir, primaryFilename))
                        filetimestamp = time.localtime(os.path.getmtime(os.path.join(filedir, primaryFilename)))
                        primaryFileTime = time.strftime('%Y-%m-%d %H:%M:%S', filetimestamp)
                        jsSourceFileInfo["filesize"] = str(primaryFileSize)
                        jsSourceFileInfo["filetime"] = primaryFileTime
                        if jsSourceFileInfo.has_key("file"):
                            jsSourceFileInfo["file"] = parentDirName + '/' + jsSourceFileInfo["file"]
                    except:
                        PLOG.info("generate file info of dir %s failed,primary File %s not find,skip it" % (filedir, primaryFilename))
                        return
                if jsSourceFileInfo.has_key("poster"):
                    jsSourceFileInfo["poster"] = parentDirName + '/' + jsSourceFileInfo["poster"]
                if jsSourceFileInfo.has_key("thumbnail"):
                    jsSourceFileInfo["thumbnail"] = parentDirName + '/' + jsSourceFileInfo["thumbnail"]
                if jsSourceFileInfo.has_key("extend"):
                    jsextend = jsSourceFileInfo["extend"]
                    if jsextend.has_key("screenshot"):
                        jsscreenshottmp = []
                        for picture in jsextend["screenshot"]:
                            picture = parentDirName + '/' + picture
                            jsscreenshottmp.append(picture)
                        jsextend["screenshot"] = jsscreenshottmp
                destJson["list"].append(jsSourceFileInfo)
                PLOG.debug('generate file info of dir "%s" success' % (filedir))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (filedir))
        else:
            PLOG.debug('not find "file" node in info file %s , skip it' % (jsonSourcefile))
    else:
        PLOG.warn('js file %s is null,maybe path error! skip it' % (jsonSourcefile))
Example 2: login
# Required import: from pysmartac.log import PLOG [as alias]
# Or: from pysmartac.log.PLOG import warn [as alias]
def login(self):
    try:
        FTP.connect(self, self.host, timeout=10)
    except:
        PLOG.warn('Can not connect to ftp server "%s"' % self.host)
        return False
    try:
        FTP.login(self, self.user, self.pwd)
    except:
        PLOG.warn('Login ftp server "%s" failed ,username or password error' % self.host)
        return False
    return True
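Note that login() above is a method shown out of its class: the FTP.connect(self, ...) and FTP.login(self, ...) calls suggest it belongs to a subclass of ftplib.FTP that stores host, user and pwd on the instance. A minimal sketch of that assumed surrounding class follows; the class name and constructor are illustrative, not taken from the source.

from ftplib import FTP
from pysmartac.log import PLOG

class FtpCrawler(FTP):          # hypothetical name; the real class is not shown on this page
    def __init__(self, host, user, pwd):
        FTP.__init__(self)      # old-style base call, matching the Python 2 idiom of these examples
        self.host = host
        self.user = user
        self.pwd = pwd
    # the login() method from Example 2 would sit here unchanged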
Example 3: run
# Required import: from pysmartac.log import PLOG [as alias]
# Or: from pysmartac.log.PLOG import warn [as alias]
def run(self):
    self.status = "waiting start"
    if not self.enabled:
        self.status = "disabled"
        return
    if self.bootwait > 0: time.sleep(self.bootwait)
    while not self.thread_stop:
        self.status = "checking"
        # print '%s start checking at %s ...\n' % (self.name, time.ctime())
        if self.processHandle != None and self.processHandle.poll() != None:
            print "recycle %s" % (self.name)
            self.processHandle = None
        if not check_pid(self._pid): self.update_pid()
        if not check_pid(self._pid): self._pid = 0
        if self._pid == 0:
            self.processHandle = None
            PLOG.warn("%s check failed!restarting ..." % (self.name))
            if self.rebootwait > 0:
                self.status = "waiting restart"
                PLOG.info("%s restarting wait %d second..." % (self.name, self.rebootwait))
                time.sleep(self.rebootwait)
            try:
                self.status = "starting"
                # change the current working directory
                if len(self.runpath) > 0:
                    try:
                        if not os.path.isdir(self.runpath): os.makedirs(self.runpath)
                        if not os.path.isdir(self.runpath):
                            self.enabled = False
                            PLOG.error("%s run path invalid!" % (self.name))
                            break
                        os.chdir(self.runpath)
                    except Exception, e:
                        PLOG.error("%s restart failed!change current path failed!err=%s" % (self.name, e))
                PLOG.info("%s execute command:'%s'" % (self.name, self.command))
                self.processHandle = subprocess.Popen(self.command, bufsize=0, executable=None, stdin=None,
                                                      stdout=None,
                                                      stderr=None,
                                                      preexec_fn=None,
                                                      close_fds=False,
                                                      shell=True,
                                                      cwd=self.runpath, env=None,
                                                      universal_newlines=False,
                                                      startupinfo=None,
                                                      creationflags=0)
                self._pid = self.processHandle.pid
            except Exception, e:
                PLOG.error("%s restart failed!err=%s" % (self.name, e))
Example 4: run
# Required import: from pysmartac.log import PLOG [as alias]
# Or: from pysmartac.log.PLOG import warn [as alias]
def run():
    init()
    if conf.httpServerSite[-1] != '/': conf.httpServerSite += '/'
    # crawl the files and generate the final json files - start
    if len(conf.movieDir) != 0:
        scanFile(conf.movieDir, "movie")
    else:
        PLOG.warn("moviedir is empty,please check config file")
    if len(conf.androidDir) != 0:
        scanFile(conf.androidDir, "android")
    else:
        PLOG.warn("androiddir is empty,please check config file")
    if len(conf.iosDir) != 0:
        scanFile(conf.iosDir, "ios")
    else:
        PLOG.warn("iosdir is empty,please check config file")
Example 5: scanFtpServerFiles
# Required import: from pysmartac.log import PLOG [as alias]
# Or: from pysmartac.log.PLOG import warn [as alias]
def scanFtpServerFiles(self, root, filetype):
    PLOG.debug('Type["%s"] file start crawling...ftpserver = %s ,dir = %s ' % (filetype, self.host, root))
    outputjsfilename = ""
    filesource = ""
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
        filesource = conf.ftpServerMovieSource
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
        filesource = conf.ftpServerAppSource
    # enumerate all directories under the working directory
    fileDir = self.listdir(root)
    # json data describing every movie or app
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = filesource
    allJsonInfo["list"] = []
    for filedir in fileDir:
        PLOG.debug('start generate file info of dir "%s"...' % (root + filedir))
        fileItems = self.listFile(root + filedir)
        primaryFilename = ""
        primaryFileSize = ""
        primaryFileTime = ""
        jsFileInfo = None
        for fileitem in fileItems:
            if fileitem[-5:] == ".json":
                fileinfo = []
                if fileitem.find("/") == -1: fileitem = root + filedir + '/' + fileitem
                try:
                    self.retrlines("RETR %s" % fileitem, fileinfo.append)
                except:
                    PLOG.warn('retr %s except! skip it !' % fileitem)
                filedetailinfo = ""
                for linestr in fileinfo:
                    filedetailinfo += linestr
                if filedetailinfo != "":
                    try:
                        filedetailinfo = filedetailinfo.decode("gbk")
                    except:
                        pass
                    try:
                        filedetailinfo = filedetailinfo.decode("gb2312")
                    except:
                        pass
                        #PLOG.debug("decode failed! %s is not encoded by gbk")
                    jsFileInfo = json.loads(filedetailinfo, 'utf8')
                    if jsFileInfo != None:
                        if jsFileInfo.has_key("file"):
                            primaryFilename = jsFileInfo["file"]
                        else:
                            PLOG.debug('not find "file" node in info file %s , skip it' % (fileitem))
                    else:
                        PLOG.error('js file %s is null,maybe path error! skip it' % (fileitem))
                break
        if jsFileInfo != None and jsFileInfo != "":
            if primaryFilename != "":
                try:
                    timestamp = []
                    self.retrlines("LIST %s" % root + filedir + '/' + primaryFilename, lambda x: timestamp.append(self.separateFileTime(x)))
                    primaryFileSize = self.size(root + filedir + '/' + primaryFilename)
                    primaryFileTime = timestamp.pop()
                    jsFileInfo["filesize"] = primaryFileSize
                    jsFileInfo["filetime"] = primaryFileTime
                    jsFileInfo["id"] = str(uuid.uuid1())
                    filerelativedir = filedir + '/'
                    if jsFileInfo.has_key("file"):
                        jsFileInfo["file"] = filerelativedir + jsFileInfo["file"]
                    if jsFileInfo.has_key("poster"):
                        jsFileInfo["poster"] = filerelativedir + jsFileInfo["poster"]
                    if jsFileInfo.has_key("thumbnail"):
                        jsFileInfo["thumbnail"] = filerelativedir + jsFileInfo["thumbnail"]
                    if jsFileInfo.has_key("extend"):
                        jsextend = jsFileInfo["extend"]
                        if jsextend.has_key("screenshot"):
                            jsscreenshottmp = []
                            for picture in jsextend["screenshot"]:
                                picture = filerelativedir + picture
                                jsscreenshottmp.append(picture)
                            jsextend["screenshot"] = jsscreenshottmp
                    allJsonInfo["list"].append(jsFileInfo)
                    PLOG.debug('generate file info of dir "%s" success' % (root + filedir))
                except:
                    PLOG.warn('retr %s except! skip it !' % (root + filedir + '/' + primaryFilename))
                    PLOG.debug("generate file info of dir %s failed,not find primary File %s" % (root + filedir, primaryFilename))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (root + filedir))
        else:
            PLOG.debug("generate file info of dir %s failed,not find js info file" % (root + filedir))
    if (outputjsfilename == ""):
        PLOG.debug("unkown file type!")
        return 0
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    # upload the json file to the ftp server
    ttt = len(outputjsfilename)
    with open(outputjsfilename, "r") as f:
        try:
            outputdirtmp = conf.ftpJsonOutputPath.replace("ftp://", "")
            outputdir = outputdirtmp[outputdirtmp.find("/") + 1:]
            #......... the rest of this example is omitted here .........
Example 6: scanFtpServerFiles
# Required import: from pysmartac.log import PLOG [as alias]
# Or: from pysmartac.log.PLOG import warn [as alias]
def scanFtpServerFiles(self, root, filetype):
    PLOG.debug('Type["%s"] file start crawling...ftpserver = %s ,dir = %s ' % (filetype, self.host, root))
    outputjsfilename = ""
    filesource = ""
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
        filesource = conf.ftpServerMovieSource
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
        filesource = conf.ftpServerAppSource
    # enumerate all directories under the working directory
    fileDir = self.listdir(root)
    # json data describing every movie or app
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = filesource
    allJsonInfo["list"] = []
    for filedir in fileDir:
        PLOG.debug('start generate file info of dir "%s"...' % (root + filedir))
        dictFileItems = self.listFile(root + filedir)
        primaryFilename = ""
        primaryFileSize = ""
        primaryFileTime = ""
        jsFileInfo = None
        for (k, v) in dictFileItems.items():
            if v.ext == ".json":
                fileinfo = []
                try:
                    self.retrlines("RETR %s" % root + filedir + "/" + v.fname, fileinfo.append)
                except:
                    PLOG.warn("retr %s except! skip it !" % v.fname)
                filedetailinfo = ""
                for linestr in fileinfo:
                    filedetailinfo += linestr
                if filedetailinfo != "":
                    filedetailinfo = filedetailinfo.decode("gbk")
                    jsFileInfo = json.loads(filedetailinfo, "utf8")
                    if jsFileInfo != None:
                        if jsFileInfo.has_key("file"):
                            primaryFilename = jsFileInfo["file"]
                        else:
                            PLOG.debug('not find "file" node in info file %s , skip it' % (v.fname))
                    else:
                        PLOG.error("js file %s is null,maybe path error! skip it" % (v.fname))
                break
        if jsFileInfo != "":
            if primaryFilename != "":
                if dictFileItems.has_key(primaryFilename):
                    primaryFileItem = dictFileItems[primaryFilename]
                    primaryFileSize = primaryFileItem.size
                    time = primaryFileItem.time[: primaryFileItem.time.find(".")]
                    primaryFileTime = datetime.datetime.strptime(time, "%Y%m%d%H%M%S").strftime("%Y-%m-%d %H:%M:%S")
                    jsFileInfo["filesize"] = primaryFileSize
                    jsFileInfo["filetime"] = primaryFileTime
                    jsFileInfo["id"] = str(uuid.uuid1())
                    filerelativedir = filedir + "/"
                    if jsFileInfo.has_key("file"):
                        jsFileInfo["file"] = filerelativedir + jsFileInfo["file"]
                    if jsFileInfo.has_key("poster"):
                        jsFileInfo["poster"] = filerelativedir + jsFileInfo["poster"]
                    if jsFileInfo.has_key("thumbnail"):
                        jsFileInfo["thumbnail"] = filerelativedir + jsFileInfo["thumbnail"]
                    if jsFileInfo.has_key("extend"):
                        jsextend = jsFileInfo["extend"]
                        if jsextend.has_key("screenshot"):
                            jsscreenshottmp = []
                            for picture in jsextend["screenshot"]:
                                picture = filerelativedir + picture
                                jsscreenshottmp.append(picture)
                            jsextend["screenshot"] = jsscreenshottmp
                    allJsonInfo["list"].append(jsFileInfo)
                    PLOG.debug('generate file info of dir "%s" success' % (root + filedir))
                else:
                    PLOG.debug("generate file info of dir %s failed,not find primary File %s" % (root + filedir, primaryFilename))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (root + filedir))
        else:
            PLOG.debug("generate file info of dir %s failed,not find js info file" % (root + filedir))
    if outputjsfilename == "":
        PLOG.debug("unkown file type!")
        return 0
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    # upload the json file to the ftp server
    with open(outputjsfilename, "r") as f:
        try:
            outputdirtmp = conf.ftpJsonOutputPath.replace("ftp://", "")
            outputdir = outputdirtmp[outputdirtmp.find("/") + 1:]
            self.storlines("STOR %s" % outputdir + outputjsfilename, f)
            PLOG.debug("upload json file %s success !" % outputjsfilename)
        except:
            PLOG.warn("upload json file %s failed,exception !" % outputjsfilename)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, root))
Example 7: statisticsCurrentDayData
# Required import: from pysmartac.log import PLOG [as alias]
# Or: from pysmartac.log.PLOG import warn [as alias]
def statisticsCurrentDayData(daydate):
    nextday = daydate + datetime.timedelta(days=1)
    startquerytime = daydate
    endquerytime = daydate + datetime.timedelta(hours=SAPeakDataPublic.st.queryunit)
    while endquerytime <= nextday:
        acctquerysql = "select acctinputoctets,acctoutputoctets,acctstarttime,acctstoptime,regionid from %s where acctstarttime>='%s' and acctstarttime<'%s'" % \
                       (SAPeakDataPublic.sadb.tablename, startquerytime.strftime('%Y-%m-%d %H:%M:%S'), endquerytime.strftime('%Y-%m-%d %H:%M:%S'))
        PLOG.debug("sql=%s" % acctquerysql)
        startquerytime = endquerytime
        endquerytime = endquerytime + datetime.timedelta(hours=SAPeakDataPublic.st.queryunit)
        i = 0
        while i < SAPeakDataPublic.st.queryrepeattimes:
            res = SAPeakDataPublic.querysql(acctquerysql)
            if res != None:
                break
            else:
                i = i + 1
        if i == 3 or res == None:
            print("%s statistics data failed! db query appear error %d consecutive times,please execute again later!" % (daydate.strftime('%Y-%m-%d'), SAPeakDataPublic.st.queryrepeattimes))
            PLOG.info("%s statistics data failed! db query appear error %d consecutive times,please execute again later!" % (daydate.strftime('%Y-%m-%d'), SAPeakDataPublic.st.queryrepeattimes))
            return
        # tally the query results
        PLOG.trace("start statistics...")
        for row in res:
            if row[2] == None or row[3] == None or row[4] == None:
                PLOG.warn("lack essential data!skip this data")
                continue
            regionid = row[4]
            totalflow = 0
            if row[0] != None:
                totalflow += row[0]
            if row[1] != None:
                totalflow += row[1]
            if row[3].day > row[2].day:
                # the session spans midnight into the next day
                endMinute = 23 * 60 + 59
            elif row[3].day < row[2].day:
                PLOG.info("stoptime day less than starttime day,invalid data,skip")
            else:
                endMinute = row[3].hour * 60 + row[3].minute
            startMinute = row[2].hour * 60 + row[2].minute
            #startMinute = datetime.datetime.strptime(row[2],'%Y-%m-%d %H:%M:%S')
            #endMinute = datetime.datetime.strptime(row[3],'%Y-%m-%d %H:%M:%S')
            totalMinute = endMinute - startMinute + 1
            if totalMinute <= 0:
                PLOG.info("stoptime less than starttime,invalid data,skip")
                continue
            if SAStopDefine.stopDc.stops.has_key(regionid):
                stop = SAStopDefine.stopDc.stops[regionid]
                startindex = startMinute
                endindex = endMinute
                flowOneMinute = float(totalflow) / totalMinute / 1024 / 1024
                index = startindex
                while index <= endindex:
                    stop.dayArray[index][0] += 1
                    stop.dayArray[index][1] += flowOneMinute
                    if stop.dayArray[index][0] > stop.peakonlinenum:
                        stop.peakonlinenum = stop.dayArray[index][0]
                        stop.peakonlinetime = datetime.datetime(daydate.year, daydate.month, daydate.day, index / 60, index % 60)
                    if stop.dayArray[index][0] > stop.peakbandwidth:
                        stop.peakbandwidth = stop.dayArray[index][1]
                        stop.peakbandwidthtime = datetime.datetime(daydate.year, daydate.month, daydate.day, index / 60, index % 60)
                    index += 1
        PLOG.trace("statistics end")
    # data processing finished, print the peak data for every stop
    for stopid, stop in stopsCentor.stops.items():
        peakbandwidth = stop.peakbandwidth * 8 / 60
        print("%s %s %d %.2f" % (daydate.strftime('%Y-%m-%d'), stop.name, stop.peakonlinenum, peakbandwidth))
        PLOG.debug("%s %s %d %.2f %s %s" % (daydate.strftime('%Y-%m-%d'), stop.name, stop.peakonlinenum, peakbandwidth, stop.peakonlinetime.strftime('%H:%M'), stop.peakbandwidthtime.strftime('%H:%M')))