Mercurial > public > src > rhodecode
comparison pylons_app/lib/celerylib/tasks.py @ 530:5c376ac2d4c9 celery
rewrote graph plotting, added zooming and json dump instead of stupid string formatting.
author | Marcin Kuzminski <marcin@python-works.com> |
---|---|
date | Sat, 18 Sep 2010 17:03:29 +0200 |
parents | a9e50dce3081 |
children | b12ea84fb906 |
comparison
equal
deleted
inserted
replaced
529:9836541b0509 | 530:5c376ac2d4c9 |
---|---|
9 from pylons_app.lib.utils import OrderedDict | 9 from pylons_app.lib.utils import OrderedDict |
10 from time import mktime | 10 from time import mktime |
11 from vcs.backends.hg import MercurialRepository | 11 from vcs.backends.hg import MercurialRepository |
12 import calendar | 12 import calendar |
13 import traceback | 13 import traceback |
14 import json | |
14 | 15 |
15 __all__ = ['whoosh_index', 'get_commits_stats', | 16 __all__ = ['whoosh_index', 'get_commits_stats', |
16 'reset_user_password', 'send_email'] | 17 'reset_user_password', 'send_email'] |
17 | 18 |
18 def get_session(): | 19 def get_session(): |
82 | 83 |
83 @task | 84 @task |
84 def get_commits_stats(repo): | 85 def get_commits_stats(repo): |
85 log = get_commits_stats.get_logger() | 86 log = get_commits_stats.get_logger() |
86 aggregate = OrderedDict() | 87 aggregate = OrderedDict() |
88 overview_aggregate = OrderedDict() | |
87 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '') | 89 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '') |
88 repo = MercurialRepository(repos_path + repo) | 90 repo = MercurialRepository(repos_path + repo) |
89 #graph range | 91 #graph range |
90 td = datetime.today() + timedelta(days=1) | 92 td = datetime.today() + timedelta(days=1) |
91 y, m, d = td.year, td.month, td.day | 93 y, m, d = td.year, td.month, td.day |
92 ts_min = mktime((y, (td - timedelta(days=calendar.mdays[m])).month, | 94 |
95 ts_min_y = mktime((y - 1, (td - timedelta(days=calendar.mdays[m])).month, | |
93 d, 0, 0, 0, 0, 0, 0,)) | 96 d, 0, 0, 0, 0, 0, 0,)) |
94 ts_max = mktime((y, m, d, 0, 0, 0, 0, 0, 0,)) | 97 ts_min_m = mktime((y, (td - timedelta(days=calendar.mdays[m])).month, |
95 | 98 d, 0, 0, 0, 0, 0, 0,)) |
99 | |
100 ts_max_y = mktime((y, m, d, 0, 0, 0, 0, 0, 0,)) | |
101 | |
96 def author_key_cleaner(k): | 102 def author_key_cleaner(k): |
97 k = person(k) | 103 k = person(k) |
98 k = k.replace('"', "'") #for js data compatibilty | 104 k = k.replace('"', "") #for js data compatibilty |
99 return k | 105 return k |
100 | 106 |
101 for cs in repo[:200]:#added limit 200 until fix #29 is made | 107 for cs in repo[:1000]:#added limit 200 until fix #29 is made |
102 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1], | 108 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1], |
103 cs.date.timetuple()[2]) | 109 cs.date.timetuple()[2]) |
104 timetupple = [int(x) for x in k.split('-')] | 110 timetupple = [int(x) for x in k.split('-')] |
105 timetupple.extend([0 for _ in xrange(6)]) | 111 timetupple.extend([0 for _ in xrange(6)]) |
106 k = mktime(timetupple) | 112 k = mktime(timetupple) |
111 aggregate[author_key_cleaner(cs.author)][k]["changed"] += len(cs.changed) | 117 aggregate[author_key_cleaner(cs.author)][k]["changed"] += len(cs.changed) |
112 aggregate[author_key_cleaner(cs.author)][k]["removed"] += len(cs.removed) | 118 aggregate[author_key_cleaner(cs.author)][k]["removed"] += len(cs.removed) |
113 | 119 |
114 else: | 120 else: |
115 #aggregate[author_key_cleaner(cs.author)].update(dates_range) | 121 #aggregate[author_key_cleaner(cs.author)].update(dates_range) |
116 if k >= ts_min and k <= ts_max: | 122 if k >= ts_min_y and k <= ts_max_y: |
117 aggregate[author_key_cleaner(cs.author)][k] = {} | 123 aggregate[author_key_cleaner(cs.author)][k] = {} |
118 aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1 | 124 aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1 |
119 aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added) | 125 aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added) |
120 aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed) | 126 aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed) |
121 aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed) | 127 aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed) |
122 | 128 |
123 else: | 129 else: |
124 if k >= ts_min and k <= ts_max: | 130 if k >= ts_min_y and k <= ts_max_y: |
125 aggregate[author_key_cleaner(cs.author)] = OrderedDict() | 131 aggregate[author_key_cleaner(cs.author)] = OrderedDict() |
126 #aggregate[author_key_cleaner(cs.author)].update(dates_range) | 132 #aggregate[author_key_cleaner(cs.author)].update(dates_range) |
127 aggregate[author_key_cleaner(cs.author)][k] = {} | 133 aggregate[author_key_cleaner(cs.author)][k] = {} |
128 aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1 | 134 aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1 |
129 aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added) | 135 aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added) |
130 aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed) | 136 aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed) |
131 aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed) | 137 aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed) |
132 | 138 |
133 d = '' | 139 |
134 tmpl0 = u""""%s":%s""" | 140 if overview_aggregate.has_key(k): |
135 tmpl1 = u"""{label:"%s",data:%s,schema:["commits"]},""" | 141 overview_aggregate[k] += 1 |
142 else: | |
143 overview_aggregate[k] = 1 | |
144 | |
145 overview_data = [] | |
146 for k, v in overview_aggregate.items(): | |
147 overview_data.append([k, v]) | |
148 data = {} | |
136 for author in aggregate: | 149 for author in aggregate: |
137 | 150 data[author] = {"label":author, |
138 d += tmpl0 % (author, | 151 "data":[{"time":x, |
139 tmpl1 \ | 152 "commits":aggregate[author][x]['commits'], |
140 % (author, | 153 "added":aggregate[author][x]['added'], |
141 [{"time":x, | 154 "changed":aggregate[author][x]['changed'], |
142 "commits":aggregate[author][x]['commits'], | 155 "removed":aggregate[author][x]['removed'], |
143 "added":aggregate[author][x]['added'], | 156 } for x in aggregate[author]], |
144 "changed":aggregate[author][x]['changed'], | 157 "schema":["commits"] |
145 "removed":aggregate[author][x]['removed'], | 158 } |
146 } for x in aggregate[author]])) | 159 |
147 if d == '': | 160 if not data: |
148 d = '"%s":{label:"%s",data:[[0,1],]}' \ | 161 data[author_key_cleaner(repo.contact)] = { |
149 % (author_key_cleaner(repo.contact), | 162 "label":author_key_cleaner(repo.contact), |
150 author_key_cleaner(repo.contact)) | 163 "data":[0, 1], |
151 return (ts_min, ts_max, d) | 164 "schema":["commits"], |
165 } | |
166 | |
167 return (ts_min_m, ts_max_y, json.dumps(data), json.dumps(overview_data)) | |
152 | 168 |
153 @task | 169 @task |
154 def reset_user_password(user_email): | 170 def reset_user_password(user_email): |
155 log = reset_user_password.get_logger() | 171 log = reset_user_password.get_logger() |
156 from pylons_app.lib import auth | 172 from pylons_app.lib import auth |
157 from pylons_app.model.db import User | 173 from pylons_app.model.db import User |
158 | 174 |
159 try: | 175 try: |
160 | |
161 try: | 176 try: |
162 sa = get_session() | 177 sa = get_session() |
163 user = sa.query(User).filter(User.email == user_email).scalar() | 178 user = sa.query(User).filter(User.email == user_email).scalar() |
164 new_passwd = auth.PasswordGenerator().gen_password(8, | 179 new_passwd = auth.PasswordGenerator().gen_password(8, |
165 auth.PasswordGenerator.ALPHABETS_BIG_SMALL) | 180 auth.PasswordGenerator.ALPHABETS_BIG_SMALL) |