本文整理汇总了Python中pygaga.helpers.logger.log_init函数的典型用法代码示例。如果您正苦于以下问题:Python log_init函数的具体用法?Python log_init怎么用?Python log_init使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了log_init函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: my_filter
'/', 'home',
)
def my_filter(input):
    """Identity Jinja filter: hand the value back unchanged."""
    return input
# Register the filter with the Jinja environment under the name 'my_filter'.
ENV.filters['my_filter'] = my_filter
class home:
    """web.py handler for the '/' route."""

    def GET(self):
        """Respond to GET with a fixed placeholder body."""
        return 'test'
if __name__ == "__main__":
    # usage: ${prog} ip:port --daemon --stderr ...
    gflags.DEFINE_boolean('daemon', False, "is start in daemon mode?")
    gflags.DEFINE_boolean('webdebug', False, "is web.py debug")
    gflags.DEFINE_boolean('reload', False, "is web.py reload app")
    # gflags must not see argv[1] (the ip:port address), so hide it while
    # log_init() parses the flags, then restore it for web.py.
    backup_args = []
    backup_args.extend(sys.argv)
    sys.argv = [sys.argv[0],] + sys.argv[2:]
    log_init('WebLogger', "sqlalchemy.*")
    sys.argv = backup_args[:2]
    web.config.debug = FLAGS.webdebug
    if len(sys.argv) == 1:
        # No listen address given: serve through FastCGI instead of HTTP.
        web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
    if FLAGS.daemon:
        # NOTE(review): nesting reconstructed (source indentation was lost);
        # file_path must be defined earlier in the file -- confirm.
        daemon.daemonize(os.path.join(file_path, 'web.pid'))
    #render = web.template.render('templates/', base='layout')
    app = web.application(urls, globals(), autoreload=FLAGS.reload)
    app.run()
开发者ID:qiaohui,项目名称:pygaga,代码行数:29,代码来源:web_template.py
示例2: check_graphite
# Alert thresholds and query time-window for the graphite check below.
gflags.DEFINE_boolean('gt', True, "Alert if lager than/smaller than")  # NOTE(review): "lager" is a typo for "larger" in the help text
gflags.DEFINE_float('warnv', 0.0, "Warning thredshold")  # NOTE(review): "thredshold" typo in help text
gflags.DEFINE_float('errorv', 0.0, "Error thredshold")  # NOTE(review): "thredshold" typo in help text
gflags.DEFINE_string('since', "-1days", "From time")
gflags.DEFINE_string('until', "-", "Until time")
def check_graphite(server, target, n, warnv=0.0, errorv=0.0, gt=True, since="-1days", until="-"):
    """Fetch a graphite target and test its last *n* data points against
    the warning/error thresholds.

    Args:
        server: graphite host ("host" or "host:port").
        target: graphite target expression to render.
        n: number of trailing non-null data points to inspect.
        warnv/errorv: warning and error thresholds.
        gt: if True, alert when values are greater than the threshold;
            if False, alert when smaller.
        since/until: graphite from/until time specs.

    Returns:
        (is_warn, is_error, lastn_datapoints) where the booleans say whether
        ALL of the last n points crossed the respective threshold.
    """
    url = "http://%s/render?format=json&from=%s&until=%s&target=%s" % (server, since, until, target)
    logger.debug("Fetching %s", url)
    data = download(url)
    json_data = simplejson.loads(data)
    data_points = json_data[0]['datapoints']
    # FIX: honour the `n` parameter instead of always reading the global
    # FLAGS.lastn (the parameter was silently ignored before).  The only
    # caller passes FLAGS.lastn as n, so behaviour is unchanged for it.
    # Null samples (value None/0 in x[0]) are skipped by the predicate.
    lastn_datapoints = list(takelastn(data_points, n, lambda x: not x[0]))
    logger.debug("Last n data point %s", lastn_datapoints)
    # (value > threshold) XOR gt is False exactly when the value is on the
    # alerting side of the threshold for the chosen direction.
    is_warn = all_matched(lambda x: not ((x[0] > warnv) ^ gt), lastn_datapoints)
    is_error = all_matched(lambda x: not ((x[0] > errorv) ^ gt), lastn_datapoints)
    return is_warn, is_error, lastn_datapoints
def alert_main():
    """Run one threshold check against FLAGS.target and log the outcome.

    Error takes precedence over warning; nothing is logged when neither
    threshold is crossed.
    """
    is_warn, is_error, lastn_datapoints = check_graphite(
        FLAGS.server, FLAGS.target, FLAGS.lastn,
        FLAGS.warnv, FLAGS.errorv, FLAGS.gt, FLAGS.since, FLAGS.until)
    if is_error:
        logger.error("Alert %s is_gt %s:%s error %s!",
                     FLAGS.target, FLAGS.gt, FLAGS.errorv, lastn_datapoints)
    elif is_warn:
        logger.warn("Alert %s is_gt %s:%s warning %s!",
                    FLAGS.target, FLAGS.gt, FLAGS.warnv, lastn_datapoints)
if __name__ == "__main__":
    # usage: graphite_alert.py --pbverbose warn --use_paperboy --target xxx.xxx --warnv w --errorv e --since -1hours:%s
    # Configure the 'AlertLogger' logger; silence loggers matching "sqlalchemy.*".
    log_init('AlertLogger', "sqlalchemy.*")
    alert_main()
开发者ID:qiaohui,项目名称:pygaga,代码行数:29,代码来源:graphite_alert.py
示例3: len
logger.info("crawled %s len %s", url, len(data))
except KeyboardInterrupt:
raise
except:
logger.warn("crawl failed %s exception %s", url, traceback.format_exc())
def crawl_page(item_id, url, headers):
    """Download one item page and return the response body as a string.

    All handled failures are logged at info level and swallowed; the
    function then returns the empty string instead of raising.

    NOTE: Python 2 code ("except Exc, e" syntax).  The HTTPError clause
    must stay before URLError -- HTTPError is a subclass of URLError.
    """
    logger.debug("Crawling %s", url)
    data = ""
    try:
        req = urllib2.Request(url, headers=headers)
        u = urllib2.urlopen(req)
        data = u.read()
        u.close()
    except ValueError, e:
        logger.info("download %s:%s url value error %s", item_id, url, e.message)
    except HTTPError, e1:
        # 404s are expected for removed items and are deliberately not logged.
        if e1.code != 404:
            logger.info("download %s:%s failed http code: %s", item_id, url, e1.code)
    except URLError, e2:
        logger.info("download %s:%s failed url error: %s", item_id, url, e2.reason)
    except socket.timeout:
        logger.info("download %s:%s failed socket timeout", item_id, url)
    return data
if __name__ == "__main__":
    # Configure the 'MeiliCrawlLogger' logger, then run the full crawl.
    log_init("MeiliCrawlLogger")
    crawl_all()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:29,代码来源:crawl_meilishuo.py
示例4: imagemagick_resize
imagemagick_resize(210, 210, big_path, mid_path)
if not os.path.exists(sma_path):
logger.debug("thumbing %s %s", sma_path, item)
imagemagick_resize(60, 60, big_path, sma_path)
except:
logger.error("unknown error %s, %s", item, traceback.format_exc())
def crawl_page(item_id, url, headers):
    """Download one item page and return the response body as a string.

    All handled failures are logged at info level and swallowed; the
    function then returns the empty string instead of raising.  Unlike the
    meilishuo variant, every HTTP error code (including 404) is logged.

    NOTE: Python 2 code ("except Exc, e" syntax).  The HTTPError clause
    must stay before URLError -- HTTPError is a subclass of URLError.
    """
    logger.debug("Crawling %s", url)
    data = ""
    try:
        req = urllib2.Request(url, headers=headers)
        u = urllib2.urlopen(req)
        data = u.read()
        u.close()
    except ValueError, e:
        logger.info("download %s:%s url value error %s", item_id, url, e.message)
    except HTTPError, e1:
        logger.info("download %s:%s failed http code: %s", item_id, url, e1.code)
    except URLError, e2:
        logger.info("download %s:%s failed url error: %s", item_id, url, e2.reason)
    except socket.timeout:
        logger.info("download %s:%s failed socket timeout", item_id, url)
    return data
if __name__ == "__main__":
    # Configure the 'ProcessItemLogger' logger, then process all items.
    log_init("ProcessItemLogger")
    process_all_items()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:29,代码来源:process_item_image.py
示例5: log_init
#!/Library/Frameworks/Python.framework/Versions/2.7/Resources/Python.app/Contents/MacOS/Python
# coding: utf-8
import gflags
import sys
from pygaga.helpers.logger import log_init
from guang_crawler.fix_thumb_impl import fix_thumb_main
# Command-line flags consumed by fix_thumb_main().
gflags.DEFINE_string('path', "/space/wwwroot/image.guang.j.cn/ROOT/images/", "image path")
gflags.DEFINE_string('org_path', "/space/wwwroot/image.guang.j.cn/ROOT/org_images/", "org image path")
gflags.DEFINE_string('crawl_path', "/space/crawler/image_crawler/static", "image path")
gflags.DEFINE_integer('itemid', 0, "crawl item id")
gflags.DEFINE_integer('limit', 0, "limit crawl items count")
gflags.DEFINE_string('where', "", "additional where sql, e.g. a=b and c=d")
gflags.DEFINE_boolean('all', False, "crawl all items")
gflags.DEFINE_boolean('removetmp', False, "is remove temperary image files after crawl?")  # NOTE(review): "temperary" typo in help text
gflags.DEFINE_boolean('force', False, "is force crawl?")
if __name__ == "__main__":
    # Configure the 'CrawlLogger' logger (this also parses the flags above);
    # silence loggers matching "sqlalchemy.*".
    log_init('CrawlLogger', "sqlalchemy.*")
    fix_thumb_main()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:22,代码来源:fix_thumb.py
示例6: log_init
logger.debug("process %s %s/%s -> %s", shop_id, pos, total, sql)
if not FLAGS.dryrun:
db.execute(sql.replace('%', '%%'))
db.execute("delete from tbk_item_convert where item_id=%s" % id)
except KeyboardInterrupt:
raise
except Exception, e:
logger.debug("in %s out %s" % (numid2id, result))
logger.warn("convert failed %s %s" % (sql, traceback.format_exc()))
except KeyboardInterrupt:
raise
except:
logger.warn("process failed %s %s reason %s" % (input, output, traceback.format_exc()))
logger.info("Convert result %s - %s", converted, total)
if __name__ == "__main__":
    log_init(['TaobaokeLogger', 'TaobaoLogger'], "sqlalchemy.*")
    # Dispatch on --action: 'remove' rolls shops back, 'update' refreshes
    # them, 'vip' updates a single VIP shop.  --all applies the operation
    # to every shop instead of just FLAGS.shop.
    if FLAGS.action == 'remove':
        if FLAGS.all:
            do_all(rollback_shop)
        else:
            rollback_shop(FLAGS.shop, None)
    elif FLAGS.action == 'update':
        if FLAGS.all:
            do_all(update_shop)
        else:
            update_shop(FLAGS.shop, None)
    elif FLAGS.action == 'vip':
        update_vip_shop(FLAGS.shop)
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:29,代码来源:process_taobaoke.py
示例7: test
def test():
log_init("CrawlLogger", "sqlalchemy.*")
db = get_db_engine()
# 测试新建
theshop = ShopExtendInfo(db, 10000001)
theshop.main_category = "服饰箱包"
theshop.location = "浙江杭州"
theshop.good_item_rate = 99.98
theshop.described_remark = 4.8
theshop.described_remark_compare = 32.13
theshop.service_remark = 4.6
theshop.service_remark_compare = -15.20
theshop.support_returnin7day = 0
theshop.support_cash = 1
theshop.support_consumer_guarantees = 1
theshop.support_credit_card = 0
theshop.open_at = datetime.datetime.strptime("2013-1-1", "%Y-%m-%d")
theshop.favorited_user_count = 1234567890
theshop.save()
testtheshop = ShopExtendInfo(db, 10000001)
assert_equal(testtheshop.main_category, "服饰箱包")
assert_equal(testtheshop.location, "浙江杭州")
assert_equal(testtheshop.good_item_rate, 99.98)
assert_equal(testtheshop.described_remark, 4.8)
assert_equal(testtheshop.described_remark_compare, 32.13)
assert_equal(testtheshop.service_remark, 4.6)
assert_equal(testtheshop.service_remark_compare, -15.20)
assert_equal(testtheshop.support_returnin7day, 0)
assert_equal(testtheshop.support_cash, 1)
assert_equal(testtheshop.support_consumer_guarantees, 1)
assert_equal(testtheshop.support_credit_card, 0)
assert_equal(testtheshop.open_at, datetime.date(2013, 1, 1))
assert_equal(testtheshop.favorited_user_count, 1234567890)
# 测试修改部分
theshop = ShopExtendInfo(db, 10000001)
theshop.main_category = "服饰箱包TEST"
theshop.location = "浙江杭州TEST"
theshop.good_item_rate = 10.98
theshop.described_remark = 3.8
theshop.described_remark_compare = -32.13
theshop.service_remark = 3.6
theshop.save()
testtheshop = ShopExtendInfo(db, 10000001)
assert_equal(testtheshop.main_category, "服饰箱包TEST")
assert_equal(testtheshop.location, "浙江杭州TEST")
assert_equal(testtheshop.good_item_rate, 10.98)
assert_equal(testtheshop.described_remark, 3.8)
assert_equal(testtheshop.described_remark_compare, -32.13)
assert_equal(testtheshop.service_remark, 3.6)
# 测试修改全部
theshop = ShopExtendInfo(db, 10000001)
theshop.main_category = "服饰箱包Test2"
theshop.location = "浙江杭州Test2"
theshop.good_item_rate = 13.98
theshop.described_remark = 4.7
theshop.described_remark_compare = 10.13
theshop.service_remark = 4.8
theshop.service_remark_compare = -12.20
theshop.support_returnin7day = 1
theshop.support_cash = 1
theshop.support_consumer_guarantees = 0
theshop.support_credit_card = 1
theshop.open_at = datetime.datetime.strptime("2013-2-1", "%Y-%m-%d")
theshop.favorited_user_count = 1234567891
theshop.save()
testtheshop = ShopExtendInfo(db, 10000001)
assert_equal(testtheshop.main_category, "服饰箱包Test2")
assert_equal(testtheshop.location, "浙江杭州Test2")
assert_equal(testtheshop.good_item_rate, 13.98)
assert_equal(testtheshop.described_remark, 4.7)
assert_equal(testtheshop.described_remark_compare, 10.13)
assert_equal(testtheshop.service_remark, 4.8)
assert_equal(testtheshop.service_remark_compare, -12.20)
assert_equal(testtheshop.support_returnin7day, 1)
assert_equal(testtheshop.support_cash, 1)
assert_equal(testtheshop.support_consumer_guarantees, 0)
assert_equal(testtheshop.support_credit_card, 1)
assert_equal(testtheshop.open_at, datetime.date(2013, 2, 1))
assert_equal(testtheshop.favorited_user_count, 1234567891)
# 测试修改全部
theshop = ShopExtendInfo(db, 10000001)
theshop.main_category = "服饰箱包Test2"
theshop.location = "浙江杭州Test2"
theshop.good_item_rate = 100.00
theshop.described_remark = 4.7
theshop.described_remark_compare = 100.00
theshop.service_remark = 4.8
theshop.service_remark_compare = -100.00
theshop.shipping_remark = 5.0
theshop.shipping_remark_compare = -100.00
theshop.support_returnin7day = 1
theshop.support_cash = 1
theshop.support_consumer_guarantees = 0
#.........这里部分代码省略.........
开发者ID:ljb-2000,项目名称:tb-crawler,代码行数:101,代码来源:taobao_shop_extend.py
示例8: log_init
#!/Library/Frameworks/Python.framework/Versions/2.7/Resources/Python.app/Contents/MacOS/Python
# coding: utf-8
import gflags
import sys
from pygaga.helpers.logger import log_init
from guang_crawler.crawl_image_impl import crawl_image_main
# Command-line flags consumed by crawl_image_main().
gflags.DEFINE_string('path', "/space/wwwroot/image.guang.j.cn/ROOT/images/", "image path")
gflags.DEFINE_string('org_path', "/space/wwwroot/image.guang.j.cn/ROOT/org_images/", "org image path")
gflags.DEFINE_string('crawl_path', "/space/crawler/image_crawler/static", "image path")
gflags.DEFINE_integer('itemid', 0, "crawl item id")
gflags.DEFINE_integer('numid', 0, "crawl item num id")
gflags.DEFINE_integer('limit', 0, "limit crawl items count")
gflags.DEFINE_string('where', "", "additional where sql, e.g. a=b and c=d")
gflags.DEFINE_boolean('all', False, "crawl all items")
gflags.DEFINE_boolean('pending', False, "crawl pending items")
gflags.DEFINE_boolean('commit', True, "is commit data into database?")
gflags.DEFINE_boolean('removetmp', False, "is remove temperary image files after crawl?")  # NOTE(review): "temperary" typo in help text
gflags.DEFINE_boolean('force', False, "is force crawl?")
#gflags.DEFINE_boolean('uploadfastdfs', True, "is upload to fastdfs?")
#gflags.DEFINE_boolean('uploadnfs', False, "is upload to nfs?")
#gflags.DEFINE_boolean('uploadorg', True, "is upload origin image to nfs?")
if __name__ == "__main__":
    # Configure 'CrawlLogger' and 'urlutils' loggers (this also parses the
    # flags above); silence loggers matching "sqlalchemy.*".
    log_init(["CrawlLogger","urlutils"], "sqlalchemy.*")
    crawl_image_main()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:28,代码来源:crawl_image.py
示例9: len
#import pdb; pdb.set_trace()
numid2volume[long(num_id)] = click_item.item_volume
click_hash = 'jn%s' % click_item.click_hash
r2 = db.execute('select 1 from taobao_report where outer_code="%s"' % click_hash)
if r2.rowcount:
outercode_matched += 1
logger.info("Total click %s creative matched %s outercode matched %s", len(click_items), creative_matched, outercode_matched)
return click_items
def load_pay_items():
    """Parse the pay-report CSV at FLAGS.pay_input and count how many of its
    orders already exist in the taobao_report table.

    Returns:
        list of PayItemType namedtuples, one per CSV data row (header skipped).
    """
    logger.info("Loading pay items")
    db = get_db_engine()
    pay_item_type = namedtuple('PayItemType', 'created name num_id shop_id shop_name count price total_price comm_rate comm tmall_rate tmall_comm total_comm status order_id')
    pay_items = []
    order_matched = 0
    # FIX: close the CSV file deterministically -- the original opened it and
    # never closed the handle.
    with open(FLAGS.pay_input) as csv_file:
        csv_reader = csv.reader(csv_file)
        header = csv_reader.next()  # skip the header row (Python 2 iterator API)
        for line in csv_reader:
            pay_item = pay_item_type(*line)
            pay_items.append(pay_item)
            # NOTE(review): SQL built by string interpolation from CSV data;
            # acceptable for a trusted report file, unsafe for untrusted input.
            r = db.execute("select 1 from taobao_report where trade_id=%s" % pay_item.order_id)
            if r.rowcount:
                order_matched += 1
    logger.info("Total payed %s order matched %s", len(pay_items), order_matched)
    return pay_items
if __name__ == '__main__':
    # Configure 'GuangLogger' and 'urlutils' loggers; silence "sqlalchemy.*".
    log_init(["GuangLogger","urlutils"], "sqlalchemy.*")
    est_main()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:30,代码来源:estimate_click2pay.py
示例10: globals
# web.py application mapping `urls` onto the handler classes in this module.
app = web.application(urls, globals(), autoreload=True)
class home:
    # web.py handler: renders the 40 newest active items with a signed timestamp.
    def GET(self):
        db = web.database(dbn='mysql', db='guang', user='guang', pw='guang', port=FLAGS.dbport, host=FLAGS.dbhost)
        # NOTE(review): the literal '%s.click.taobao.com%' looks like a LIKE
        # pattern with a stray %s placeholder -- confirm the intended SQL.
        result = db.select("item", what="id,num_id,detail_url,pic_url", where="status=1 and detail_url not like '%s.click.taobao.com%'", order="id desc", limit=40)
        # Millisecond timestamp, signed with the app secret (Taobao
        # open-platform style HMAC over appkey+timestamp).
        ts = int(time.time()*1000)
        #import pdb; pdb.set_trace()
        msg = APPSECRET + 'app_key' + str(APPKEY) + "timestamp" + str(ts) + APPSECRET
        sign = hmac.HMAC(APPSECRET, msg).hexdigest().upper()
        # Expose the timestamp and signature to the page via cookies.
        web.setcookie('timestamp', str(ts))
        web.setcookie('sign', sign)
        return render_html("home.htm", {'items' : result,
            })
if __name__ == "__main__":
    gflags.DEFINE_boolean('webdebug', False, "is web.py debug")
    gflags.DEFINE_integer('xtaoport', 8025, "fake qq port")
    # Hide argv[1] (the listen address) from gflags while log_init() parses
    # the flags, then restore it for web.py.
    backup_args = []
    backup_args.extend(sys.argv)
    sys.argv = [sys.argv[0],] + sys.argv[2:]
    log_init('XtaoLogger', "sqlalchemy.*")
    sys.argv = backup_args[:2]
    web.config.debug = FLAGS.webdebug
    if len(sys.argv) == 1:
        # No listen address given: serve through FastCGI.
        web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
    else:
        FLAGS.xtaoport = sys.argv[1]
    app.run()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:29,代码来源:taobao_page.py
示例11: try_query
i = 0
db.autocommit(False)
db.query("set autocommit=0;")
for row in results.fetch_row(maxrows=0):
item_id = row[0]
result = row[1]
is_image_crawled = row[2]
i += 1
if result == 1 and is_image_crawled == 1:
try_query(db, "update item set crawl_status=2 where id=%s" % item_id)
if result == 1 and is_image_crawled == 0:
try_query(db, "update item set crawl_status=1 where id=%s" % item_id)
if result == 0:
try_query(db, "update item set crawl_status=0 where id=%s" % item_id)
if i % 1000 == 0:
logger.debug("processing %s %s %s/%s", row[3], item_id, i, 1194351)
db.commit()
db.commit()
db.close()
if __name__ == "__main__":
    # usage: ip:port --daemon --stderr ...
    gflags.DEFINE_boolean('daemon', False, "is start in daemon mode?")
    # Configure the 'AppLogger' logger; silence loggers matching "sqlalchemy.*".
    log_init('AppLogger', "sqlalchemy.*")
    #if FLAGS.daemon:
    #    file_path = os.path.split(os.path.abspath(__file__))[0]
    #    daemon.daemonize(os.path.join(file_path, 'app.pid'))
    main()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:28,代码来源:build_item_crawlstatus.py
示例12: float
for batch_item in results:
for iid, item in batch_item.items.items():
try:
counter += 1
item_id = item['req'][0]
item_iid = item['req'][1]
item_price = item['req'][2]
#item_picurl = item['req'][3]
if item['resp']:
if item['resp']['approve_status'] != 'onsale':
logger.debug("Item %s/%s %s %s is offshelf", counter, total, item_id, item_iid)
off_counter += 1
write_db.execute("update item set status=2, modified=now() where id=%s" % item_id)
else:
price = float(item['resp']['price'])
#title = item['resp']['title']
#pic_url = item['resp']['pic_url']
if abs(item_price - price) / (item_price + 0.0000001) > 0.2 or abs(item_price - price) > 2.0:
change_counter += 1
logger.debug("Item %s/%s %s %s price %s -> %s", counter, total, item_id, item_iid, item_price, price)
if FLAGS.commit_price:
write_db.execute("update item set price=%s where id=%s" % (price, item_id))
logger.debug("req %s resp %s", item['req'], item['resp'])
except:
logger.error("update failed %s", traceback.format_exc())
logger.info("Taobao quickupdate, total %s, off %s, price change %s, volume change %s", total, off_counter, change_counter, vol_change_counter)
if __name__ == "__main__":
    # Configure 'CrawlLogger' and 'TaobaoLogger'; silence "sqlalchemy.*".
    log_init(['CrawlLogger', 'TaobaoLogger',], "sqlalchemy.*")
    crawl_main()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:30,代码来源:quick_update_taobao_status.py
示例13: taobao_report
continue
sql = """insert into taobao_report (outer_code, commission_rate, item_title, seller_nick,
num_iid, shop_title, app_key, commission, trade_id, pay_time, item_num,
category_id, pay_price, real_pay_fee, category_name, create_time) values (
"%s", "%s", "%s", "%s", %s, "%s", "%s", "%s", %s, "%s", %s, %s, "%s", "%s", "%s", now()
)""" % (
m.get('outer_code', ''), m['commission_rate'].replace('%', '%%'), m['item_title'].replace('%', '%%'),
m['seller_nick'].replace('%', '%%'), m['num_iid'],
m['shop_title'].replace('%', '%%'), m['app_key'], m['commission'], m['trade_id'], m['pay_time'], m['item_num'],
m['category_id'], m['pay_price'], m['real_pay_fee'], m.get('category_name','').replace('%', '%%')
)
logger.debug(sql)
if db:
try:
db.execute(sql)
except:
logger.warn("insert failed sql %s --> err %s", sql, traceback.format_exc())
if csv_w:
writecsv(csv_w, [d, m.get('outer_code', ''), m['commission_rate'], m['item_title'], m['seller_nick'], m['num_iid'],
m['shop_title'], m['app_key'], m['commission'], m['trade_id'], m['pay_time'], m['item_num'],
m['category_id'], m['pay_price'], m['real_pay_fee'], m.get('category_name', '')])
except:
logger.error("Got error %s %s", m, traceback.format_exc())
pageno += 1
except:
logger.error("Got fatal error %s %s", d, traceback.format_exc())
if __name__ == "__main__":
    # Configure the 'TaobaoLogger' logger; silence "sqlalchemy.*".
    log_init("TaobaoLogger", "sqlalchemy.*")
    main()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:30,代码来源:get_taobao_report.py
示例14: str
loan_obj.rate = str(loan.xpath("td[4]/text()")[0]).strip().replace("%", "")
period = str(loan.xpath("td[5]/text()")[0].encode("UTF-8")).strip().replace(" ", "")
if period.find(loan_obj.PERIOD_UNIT_DAY) > 0:
loan_obj.period = period.replace(loan_obj.PERIOD_UNIT_DAY, "")
loan_obj.period_unit = loan_obj.PERIOD_UNIT_DAY
else:
loan_obj.period = period.replace("个", "").replace(loan_obj.PERIOD_UNIT_MONTH, "")
loan_obj.period_unit = loan_obj.PERIOD_UNIT_MONTH
loan_obj.schedule = float(str(loan.xpath("td[last()]/p[1]/text()")[0].encode("UTF-8")).strip().replace(" ", "").replace("%", "").split("完成")[1])
loan_obj.db_create(db)
logger.info("company %s crawler loan: new size %s, update size %s", company_id, len(new_ids_set), len(update_ids_set))
# db - 新抓取的 = 就是要下线的
off_ids_set = db_ids_set - online_ids_set
if off_ids_set:
loan_obj = Loan(company_id)
loan_obj.db_offline(db, off_ids_set)
logger.info("company %s crawler loan: offline %s", company_id, len(off_ids_set))
except:
logger.error("url: %s xpath failed:%s", url, traceback.format_exc())
if __name__ == "__main__":
    # Configure the 'CrawlLogger' logger; silence "sqlalchemy.*".
    log_init("CrawlLogger", "sqlalchemy.*")
    crawl()
开发者ID:qiaohui,项目名称:loan_crawler,代码行数:30,代码来源:ppdai.py
示例15: main
#!/usr/bin/env python
# coding: utf-8
import os
import sys
import daemon
import gflags
import logging
from pygaga.helpers.logger import log_init
from pygaga.helpers.dbutils import get_db_engine
# Module-level logger; the name must match the one passed to log_init() below.
logger = logging.getLogger("AppLogger")
# Global gflags registry, populated by DEFINE_* calls and parsed at startup.
FLAGS = gflags.FLAGS
def main():
    """Application entry point -- template placeholder, intentionally empty."""
    pass
if __name__ == "__main__":
    # usage: ${prog} ip:port --daemon --stderr ...
    gflags.DEFINE_boolean("daemon", False, "is start in daemon mode?")
    # Parses sys.argv flags and configures the "AppLogger" logger;
    # loggers matching "sqlalchemy.*" are silenced.
    log_init("AppLogger", "sqlalchemy.*")
    # if FLAGS.daemon:
    #     file_path = os.path.split(os.path.abspath(__file__))[0]
    #     daemon.daemonize(os.path.join(file_path, 'app.pid'))
    main()
开发者ID:qiaohui,项目名称:pygaga,代码行数:30,代码来源:app_template.py
示例16: ForwardServer
except Exception, e:
logger.error('*** Failed to connect to %s:%d: %r, wait and retry', self.s_host, self.s_port, e)
logger.info('Now forwarding port %d to %s:%d ...', local_port, remote_host, remote_port)
ForwardServer(('', local_port), SubHander).serve_forever()
def connect_forward(server_host, local_port, remote_host, remote_port, username, server_port=22, keyfile=None, password=None, look_for_keys=True):
    """Start an SSH port forward and swallow Ctrl-C so it exits quietly.

    Delegates to forward_tunnel(); a KeyboardInterrupt (user abort) is
    treated as a normal shutdown rather than an error.
    """
    try:
        forward_tunnel(local_port, remote_host, remote_port,
                       server_host, server_port, username,
                       keyfile, password, look_for_keys)
    except KeyboardInterrupt:
        pass
if __name__ == "__main__":
    import gflags
    from pygaga.helpers.logger import log_init
    FLAGS = gflags.FLAGS
    # Tunnel settings: forward local --lport to --rhost:--rport via --shost.
    gflags.DEFINE_integer('lport', 3306, "local port")
    gflags.DEFINE_integer('rport', 3306, "remote port")
    gflags.DEFINE_string('rhost', '192.168.10.42', "remote host")
    gflags.DEFINE_string('shost', 'log.j.cn', "server host")
    gflags.DEFINE_integer('sport', 22, "server port")
    gflags.DEFINE_string('user', 'chuansheng.song', "server username")
    # Force colored log output to stderr at info level.
    FLAGS.stderr = True
    FLAGS.verbose = "info"
    FLAGS.color = True
    log_init("sshforward", "sqlalchemy.*")
    connect_forward(FLAGS.shost, FLAGS.lport, FLAGS.rhost, FLAGS.rport, FLAGS.user, FLAGS.sport)
开发者ID:qiaohui,项目名称:pygaga,代码行数:29,代码来源:sshforward.py
示例17: log_init
if __name__ == "__main__":
gflags.DEFINE_string('cookie', "/Users/chris/tmp/qqtest/cookies.txt", "cookie path", short_name="k")
gflags.DEFINE_string('photo', "/Users/chris/tmp/1.jpg", "photo path", short_name="p")
gflags.DEFINE_string('content', "", "post content", short_name="c")
gflags.DEFINE_boolean('daemon', False, "run as daemon")
gflags.DEFINE_boolean('fromdb', True, "post content from db")
gflags.DEFINE_boolean('dryrun', False, "dry run, not post and update db")
gflags.DEFINE_boolean('commitfail', True, "is commit status to database when failed")
gflags.DEFINE_boolean('loop', False, "is loop forever?")
gflags.DEFINE_boolean('timer', False, "is use timer post?")
gflags.DEFINE_boolean('test', False, "is test mode? not post, just check")
gflags.DEFINE_integer('sid', -1, "post one user from db")
gflags.DEFINE_integer('interval', 20, "sleep seconds between post")
gflags.DEFINE_integer('postinterval', 0, "sleep seconds between post")
log_init('QzoneLogger', "sqlalchemy.*")
#log_init('QzoneLogger', "")
if FLAGS.daemon:
if not FLAGS.pidfile:
pidfile = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'post.pid')
else:
pidfile = FLAGS.pidfile
daemon.daemonize(pidfile)
if not FLAGS.fromdb:
result = post_shuoshuo(FLAGS.cookie, FLAGS.photo, FLAGS.content)
if result:
logger.info("Uploading content success")
sys.exit(0)
else:
sys.exit(1)
else:
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:30,代码来源:post.py
示例18: log_init
import logging
from pygaga.helpers.logger import log_init
from pygaga.helpers.dateutils import tomorrow
import gflags
FLAGS = gflags.FLAGS
logger = logging.getLogger('TestLogger')
if __name__ == "__main__":
    # Configure the 'TestLogger' logger; silence "sqlalchemy.*".
    log_init("TestLogger", "sqlalchemy.*")
    # Python 2 print statement; shows the date flags registered by the
    # pygaga helpers after log_init() has parsed sys.argv.
    print "%s %s %s" % (FLAGS.start, FLAGS.end, FLAGS.date)
    # Emit one message per level to demonstrate the configured formatting.
    logger.debug("debug")
    logger.warn("warn")
    logger.info("info")
    logger.error("error")
开发者ID:qiaohui,项目名称:pygaga,代码行数:17,代码来源:log_demo.py
示例19: GET
return ""
class palette_png:
def GET(self):
web.header("Content-Type", "images/png")
params = web.input()
data = StringIO()
colors_as_image(params.c.split(",")).save(data, 'png')
data.seek(0)
return data.read()
"""
if __name__ == "__main__":
    gflags.DEFINE_boolean('daemon', False, "is start in daemon mode?")
    gflags.DEFINE_boolean('webdebug', False, "is web.py debug")
    gflags.DEFINE_boolean('reload', False, "is web.py reload app")
    gflags.DEFINE_string('solr_host', 'sdl-guang-solr4', 'solr host')
    # Hide argv[1] (the listen address) from gflags while log_init() parses
    # the flags, then restore it for web.py.
    backup_args = []
    backup_args.extend(sys.argv)
    sys.argv = [sys.argv[0],] + sys.argv[2:]
    log_init('GuangLogger', "sqlalchemy.*")
    sys.argv = backup_args[:2]
    web.config.debug = FLAGS.webdebug
    if len(sys.argv) == 1:
        # No listen address given: serve through FastCGI.
        web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
    if FLAGS.daemon:
        # NOTE(review): nesting reconstructed (source indentation was lost);
        # file_path must be defined earlier in the file -- confirm.
        daemon.daemonize(os.path.join(file_path, 'solrweb.pid'))
    #render = web.template.render('templates/', base='layout')
    app = web.application(urls, globals(), autoreload=FLAGS.reload)
    app.run()
开发者ID:iloveyo123u1,项目名称:tb-crawler,代码行数:30,代码来源:solrweb.py
示例20: mul
def mul(x, y):
    '''Return the product of x and y.

    >>> mul(2, 4)
    8
    '''
    product = x * y
    return product
class addcls:
    """In-place accumulator: `instance + y` adds y to the stored value.

    __add__ mutates self and implicitly returns None, which is why the
    ``x + 3`` doctest line expects no output.

    >>> x = addcls(5)
    >>> x + 3
    >>> x.x
    8
    """

    def __init__(self, x):
        # Seed the accumulator.
        self.x = x

    def __add__(self, y):
        # Mutate in place; implicit None return (non-standard for __add__).
        self.x = self.x + y
if __name__ == "__main__":
    import doctest
    from pygaga.helpers.logger import log_init
    # Default logging setup, then run every doctest in this module verbosely.
    log_init()
    doctest.testmod(verbose=True)
开发者ID:qiaohui,项目名称:pygaga,代码行数:25,代码来源:doctest_demo.py
注:本文中的pygaga.helpers.logger.log_init函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论