# -*- coding: UTF-8 -*-
from beaker.cache import Cache
from beaker.util import parse_cache_config_options
import logging
import urllib2, urllib
import string
from datetime import datetime
from abf.api.exceptions import *
from abf.api.jsn import *

log = logging.getLogger('models')

lt_cache = Cache('abf', expire=86400, type='file', data_dir='/tmp/abf_cache/data', lock_dir='/tmp/abf_cache/data')
st_cache = Cache('abf', expire=3600, type='file', data_dir='/tmp/abf_cache/data', lock_dir='/tmp/abf_cache/data')

def get_cached(cache, cache_key, func, *args, **kwargs):
    if cache and cache.has_key(cache_key):
        val = cache.get(cache_key)
    else:
        val = func(*args, **kwargs)
        if cache:
            cache.put(cache_key, val)
    return val
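
# Usage sketch (illustrative, not part of the original module): any jsn API call can be
# wrapped in get_cached() so that repeated reads are served from the file-backed cache,
# assuming 'models' is an initialized Models instance (defined at the bottom of this file):
#
#   cache_key = '%s__architectures' % models.abf_url
#   arches = get_cached(st_cache, cache_key, models.jsn.get_architectures)['architectures']
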
class Model(object):
    def __init__(self, models, ID=None, init_data=None):
        self.params_dict = {}
        self.ID = ID
        self.init_data = init_data
        self.models = models
        self.stub = True
        self.cacher = None
        if not ID and not init_data:
            raise Exception(_("At least one of the 'ID' or 'init_data' parameters has to be set!"))
        if init_data and 'id' not in init_data:
            raise Exception(_("Key 'id' has to be present in the initial data!"))

        if ID:
            cache_key = '%s-%s-%s' % (self.models.abf_url, self.__class__.__name__, ID)

            if st_cache and st_cache.has_key(cache_key):
                # read the cached value
                log.debug(_('Loading %(name)s %(id)s from cache') % {'name': self.__class__.__name__, 'id': ID})
                self.stub = False
                self.init_data = st_cache.get(cache_key)
                self.load()
            else:
                log.debug(_('Loading %(name)s %(id)s using API') % {'name': self.__class__.__name__, 'id': ID})
                self.stub = False
                self.get_init_data(ID)
                self.load()

                # write to cache
                if self.cacher:
                    self.cacher.put(cache_key, self.init_data)
                    log.debug(_('Results were cached'))

            for field in self.__class__.required_fields:
                if field not in self.params_dict:
                    raise Exception(_("One of the fields required for the %(name)s model was not specified: %(field)s") %
                            {'name': self.__class__.__name__, 'field': field})
        else:
            log.debug(_('Creating a stub for %(name)s %(id)s') % {'name': self.__class__.__name__, 'id': self.init_data['id']})
            self.load()
            self.stub = True

    def __getattr__(self, key):
        if key == 'params_dict':
            return None
        if key in self.params_dict:
            return self.params_dict[key]
        if self.__dict__['stub']:
            self.stub = False
            obj = self.__class__(self.models, ID=self.params_dict['id'])
            self.__dict__ = obj.__dict__
            #self.load(self.params_dict['id'])
            if key in self.params_dict:
                return self.params_dict[key]
        raise KeyError(_("Key '%s' cannot be found!") % key)

    def __eq__(self, other):
        return self.id == other.id

    def __ne__(self, other):
        return self.id != other.id
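
    # Lazy-loading sketch (illustrative; assumes an initialized Models instance 'models'):
    # a model constructed from init_data is a stub, and touching a field that the stub does
    # not carry triggers a full reload by ID through __getattr__, e.g.
    #
    #   p = Platform(models, init_data={'id': 1, 'name': 'some_platform'})  # stub, no API call
    #   p.name          # served from params_dict
    #   p.repositories  # missing from the stub: reloads Platform(models, ID=1) via the API
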
class Platform(Model):
    required_fields = ['id', 'name', 'description', 'parent_platform_id', 'created_at', 'updated_at', 'released',
                       'owner', 'visibility', 'platform_type', 'distrib_type', 'repositories']

    filter_dict = {'id': '*', 'name': '*', 'visibility': '*', 'owner': '*', 'platform_type': '*', 'repositories': '*', 'page': '1'}

    def get_init_data(self, ID):
        ID = str(ID)
        log.debug(_('Reading platform ') + ID)
        self.init_data = self.models.jsn.get_platform_by_id(ID)
        self.init_data = self.init_data['platform']

    def load(self):
        self.params_dict = self.init_data.copy()
        if 'repositories' in self.init_data:
            repos = self.params_dict['repositories']
            repositories = []
            for repo in repos:
                r = Repository(self.models, init_data=repo)
                r.params_dict['platform'] = self
                repositories.append(r)
            self.params_dict['repositories'] = repositories
        if 'owner' in self.init_data:
            self.params_dict['owner'] = User(self.models, init_data=self.init_data['owner'])
        if 'created_at' in self.init_data:
            self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
        if 'updated_at' in self.init_data:
            self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))
        self.cacher = lt_cache

    def __repr__(self):
        return self.name

    @staticmethod
    def _get_platforms_filtered(models, typ):
        cache_key = '%s__%s__%s' % (models.abf_url, Platform.__name__, typ)
        platforms = get_cached(st_cache, cache_key, models.jsn.get_platforms, typ=typ)['platforms']
        output = []
        for pl in platforms:
            p = Platform(models, init_data=pl)
            output.append(p)
        return output

    @staticmethod
    def get_user_platforms_main(models):
        return Platform._get_platforms_filtered(models, 'main')

    @staticmethod
    def get_user_platforms_personal(models):
        return Platform._get_platforms_filtered(models, 'personal')

    @staticmethod
    def get_build_platforms(models):
        cache_key = '%s__%s__build' % (models.abf_url, Platform.__name__)
        platforms = get_cached(st_cache, cache_key, models.jsn.get_build_platforms)['platforms']
        output = []
        for pl in platforms:
            p = Platform(models, init_data=pl)
            output.append(p)
        return output

    @staticmethod
    def search(models, query):
        res = models.jsn.get_search_results('platforms', query)
        platforms = res['results']['platforms']
        platforms_out = []
        for platform in platforms:
            p = Platform(models, init_data=platform)
            platforms_out.append(p)
        return platforms_out

    @staticmethod
    def info(models):
        if Platform.filter_dict['page'] == '*':
            # accumulate all pages
            platforms = []
            num = 1
            while 1:
                res = models.jsn.get_list('platforms', num)
                if not res['platforms']:
                    break
                platforms += res['platforms']
                num += 1
        else:
            res = models.jsn.get_list('platforms', Platform.filter_dict['page'])
            platforms = res['platforms']

        platforms_out = []
        for platform in platforms:
            p = Platform(models, init_data=platform)
            platforms_out.append(p)

        for value in Platform.filter_dict:
            if Platform.filter_dict[value] != '*' and value != 'page':
                platforms_out = [i for i in platforms_out if str(Platform.filter_dict[value]) in str(i.params_dict[value])]

        return platforms_out
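
    # Filtering sketch (values are hypothetical): info() honours the class-level filter_dict,
    # where every non-'*' entry is applied as a substring match against params_dict:
    #
    #   Platform.filter_dict['name'] = 'personal'  # keep only platforms whose name contains it
    #   Platform.filter_dict['page'] = '*'         # walk every page instead of just page 1
    #   platforms = Platform.info(models)
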
class Repository(Model):
    required_fields = ['id', 'name', 'created_at', 'updated_at', 'description', 'publish_without_qa', 'platform']

    filter_dict = {'id': '*', 'name': '*', 'page': '1'}

    def get_init_data(self, ID):
        ID = str(ID)
        log.debug(_('Reading repository ') + str(ID))
        self.init_data = self.models.jsn.get_repository_by_id(ID)
        self.init_data = self.init_data['repository']

    def load(self):
        self.params_dict = self.init_data.copy()
        if 'platform' in self.init_data:
            self.params_dict['platform'] = Platform(self.models, init_data=self.params_dict['platform'])
        if 'created_at' in self.init_data:
            self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
        if 'updated_at' in self.init_data:
            self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))
        self.cacher = lt_cache

    def __repr__(self):
        return '%s/%s' % (self.platform.name, self.name)

    @staticmethod
    def info(models):
        platform_info = Platform.info(models)
        repo_info = []
        for platform in platform_info:
            repos = platform.params_dict['repositories']
            for repo in repos:
                repo_fin = Repository(models, repo.id)
                repo_info.append(repo_fin)

        for value in Repository.filter_dict:
            if Repository.filter_dict[value] != '*' and value != 'page':
                repo_info = [i for i in repo_info if str(Repository.filter_dict[value]) in str(i.params_dict[value])]

        return repo_info
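
    # Note (descriptive, not in the original): Repository.info() resolves platforms through
    # Platform.info(models) and then fetches every repository by ID, so Platform.filter_dict
    # narrows the platform set while Repository.filter_dict narrows the resulting repositories
    # (both are substring filters against params_dict values).
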
class Arch(Model):
    required_fields = ['id', 'name']

    def get_init_data(self, ID):
        ID = str(ID)
        log.debug(_('Reading arch ') + str(ID))
        arches = self.models.jsn.get_architectures()['architectures']
        self.init_data = None
        for arch in arches:
            if str(arch['id']) == ID:
                self.init_data = arch
        if not self.init_data:
            raise Exception(_("Architecture with id %s not found!") % ID)

    def load(self):
        self.params_dict = self.init_data
        self.cacher = lt_cache

    @staticmethod
    def get_arches(models):
        cache_key = '%s__%s__all' % (models.abf_url, Arch.__name__)
        arches = get_cached(st_cache, cache_key, models.jsn.get_architectures)['architectures']
        output = []
        for arch in arches:
            a = Arch(models, init_data=arch)
            output.append(a)
        return output

    @staticmethod
    def get_arch_by_name(models, name):
        cache_key = '%s__%s__all' % (models.abf_url, Arch.__name__)
        arches = get_cached(st_cache, cache_key, models.jsn.get_architectures)['architectures']
        for arch in arches:
            if arch['name'] == name:
                a = Arch(models, init_data=arch)
                return a
        return None
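
    # Usage sketch (assumes an initialized Models instance 'models'):
    #
    #   arch = Arch.get_arch_by_name(models, 'x86_64')
    #   if arch is None:
    #       log.error('no such architecture')
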
    def __repr__(self):
        return self.name

class User(Model):
    required_fields = ['id', 'name', 'email', 'language', 'professional_experience', 'site', 'company', 'location',
                       'uname', 'own_projects_count', 'build_priority', 'created_at', 'updated_at', 'avatar_url', 'html_url']

    def get_init_data(self, ID):
        ID = str(ID)
        log.debug(_('Reading user ') + ID)
        self.init_data = self.models.jsn.get_user_by_id(ID)
        self.init_data = self.init_data['user']

    def load(self):
        self.params_dict = self.init_data.copy()

        if 'created_at' in self.init_data:
            self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
        if 'updated_at' in self.init_data:
            self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))

        self.cacher = lt_cache

    @staticmethod
    def search(models, query):
        res = models.jsn.get_search_results('users', query)
        users = res['results']['users']
        users_out = []
        for user in users:
            u = User(models, init_data=user)
            users_out.append(u)
        return users_out

    def __repr__(self):
        return self.uname

class Group(Model):
    required_fields = ['id', 'uname', 'own_projects_count', 'created_at', 'updated_at', 'description', 'owner',
                       'avatar_url', 'html_url']

    def get_init_data(self, ID):
        ID = str(ID)
        log.debug(_('Reading group ') + ID)
        self.init_data = self.models.jsn.get_group_by_id(ID)
        self.init_data = self.init_data['group']

    def load(self):
        self.params_dict = self.init_data.copy()

        if 'created_at' in self.init_data:
            self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
        if 'updated_at' in self.init_data:
            self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))

        if 'owner' in self.params_dict:
            self.params_dict['owner'] = User(self.models, init_data=self.params_dict['owner'])

        self.cacher = lt_cache

    @staticmethod
    def search(models, query):
        res = models.jsn.get_search_results('groups', query)
        groups = res['results']['groups']
        groups_out = []
        for group in groups:
            g = Group(models, init_data=group)
            groups_out.append(g)
        return groups_out

    def __repr__(self):
        return self.uname

class Project(Model):
    required_fields = ['id', 'name', 'fullname', 'git_url', 'created_at', 'updated_at', 'visibility', 'ancestry',
                       'default_branch', 'is_package', 'owner', 'repositories', 'owner_type', 'maintainer', 'project_statistics', ]

    filter_dict = {'id': '*', 'name': '*', 'page': '1'}

    def get_init_data(self, proj_id):
        log.debug(_("Reading project ") + str(proj_id))
        self.init_data = self.models.jsn.get_project_by_id(proj_id)
        self.init_data = self.init_data['project']

    @staticmethod
    def get_by_name(models, key):
        ''' key is a pair (owner_name, project_name), or just owner_name/project_name '''
        if type(key) is unicode or type(key) is str:
            items = key.split('/')
            if len(items) != 2:
                raise Exception(_('Invalid key: ') + key)
            key = (items[0], items[1])
        res = models.jsn.get_project_id_by_name(key)
        proj_id = res['project']['id']
        return Project(models, ID=proj_id)
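
    # Usage sketch (the project name is hypothetical); both key forms are accepted:
    #
    #   proj = Project.get_by_name(models, 'import/foo')
    #   proj = Project.get_by_name(models, ('import', 'foo'))
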
    def load(self):
        self.params_dict = self.init_data.copy()
        if 'repositories' in self.init_data:
            repos = self.params_dict['repositories']
            self.params_dict['repositories'] = []
            for repo in repos:
                r = Repository(self.models, init_data=repo)
                self.params_dict['repositories'].append(r)
        if 'owner' in self.init_data:
            self.params_dict['owner_type'] = self.init_data['owner']['type']
            if self.params_dict['owner_type'] == 'User':
                self.params_dict['owner'] = User(self.models, init_data=self.init_data['owner'])
            elif self.params_dict['owner_type'] == 'Group':
                self.params_dict['owner'] = Group(self.models, init_data=self.init_data['owner'])
        if 'ancestry' in self.init_data:
            ancestry = self.params_dict['ancestry']
            if ancestry:
                self.params_dict['ancestry'] = []
                items = ancestry.split('/')
                for item in items:
                    anc_proj = self.models.jsn.get_project_by_id(item)
                    self.params_dict['ancestry'].append(anc_proj['project']['fullname'])
        if 'created_at' in self.init_data:
            self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
        if 'updated_at' in self.init_data:
            self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))

        self.cacher = st_cache

    @staticmethod
    def search(models, query):
        res = models.jsn.get_search_results('projects', query)
        projs = res['results']['projects']
        projects_out = []
        for proj in projs:
            pr = Project(models, init_data=proj)
            projects_out.append(pr)
        return projects_out

    def __repr__(self):
        if 'owner' in self.params_dict:
            return '%s/%s' % (self.owner.uname, self.name)
        else:
            return self.fullname

    def get_refs_list(self, models):
        return self.models.jsn.get_git_refs_list(self.id)['refs_list']

    @staticmethod
    def info(models):
        repo_info = Repository.info(models)
        projects_info = []
        projs = []
        for repo in repo_info:
            if Project.filter_dict['page'] == '*':
                num = 1
                while 1:
                    p = models.jsn.get_projects_single(repo.id, num)
                    if not p['repository']['projects']:
                        break
                    projs += p['repository']['projects']
                    num += 1
            else:
                p = models.jsn.get_projects_single(repo.id, Project.filter_dict['page'])
                projs = p['repository']['projects']

            for proj in projs:
                pr = Project(models, init_data=proj)
                projects_info.append(pr)

        for value in Project.filter_dict:
            if Project.filter_dict[value] != '*' and value != 'page':
                projects_info = [i for i in projects_info if str(Project.filter_dict[value]) in str(i.params_dict[value])]

        return projects_info

    @staticmethod
    def update(models, project, name, description, visibility, is_package, default_branch,
               has_issues, has_wiki, publish_i686_into_x86_64, maintainer_id):
        DATA = {
            'id': project.id,
            'name': name,
            'description': description,
            'visibility': visibility,
            'is_package': is_package,
            'default_branch': default_branch,
            'has_issues': has_issues,
            'has_wiki': has_wiki,
            'publish_i686_into_x86_64': publish_i686_into_x86_64,
            'maintainer_id': maintainer_id,
        }
        log.debug(_('Updating project settings: ') + str(DATA))
        try:
            result = models.jsn.update_project({'project': DATA}, project.id)
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("Successfully updated settings of project %s.") % project.name)

class BuildList(Model):
    required_fields = ['id', 'container_path', 'status', 'status_string', 'package_version', 'project', 'created_at', 'updated_at',
                       'build_for_platform', 'save_to_repository', 'arch', 'update_type', 'extra_repositories',
                       'commit_hash', 'duration', 'include_repos', 'priority', 'build_log_url', 'advisory', 'mass_build', 'log_url', 'chroot_tree']

    status_by_id = {
        0: 'build complete',
        1: 'platform not found',
        2: 'platform pending',
        3: 'project not found',
        4: 'project version not found',
        6: 'project source error',
        555: 'dependencies error',
        666: 'build error',
        777: 'packages fail',
        2000: 'build pending',
        2500: 'rerun tests',
        2550: 'rerunning tests',
        3000: 'build started',
        4000: 'waiting for response',
        5000: 'build canceled',
        6000: 'build has been published',
        7000: 'build is being published',
        8000: 'publishing error',
        9000: 'publishing rejected',
        10000: 'build is canceling',
        11000: 'tests failed',
        12000: '[testing] Build has been published',
        13000: '[testing] Build is being published',
        14000: '[testing] Publishing error',
        15000: 'unpermitted architecture'
    }
    status_by_name = dict([(status_by_id[x], x) for x in status_by_id])
    final_statuses = [1, 2, 3, 4, 666, 5000, 6000, 8000, 9000, 12000, 14000]

    container_status_by_id = {
        4000: 'waiting for request for publishing container',
        6000: 'container has been published',
        7000: 'container is being published',
        8000: 'publishing error'
    }
    container_status_by_name = dict([(container_status_by_id[x], x) for x in container_status_by_id])
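
    # Lookup sketch: the *_by_id maps translate numeric ABF status codes into strings and
    # the *_by_name maps invert them, e.g.
    #
    #   BuildList.status_by_id[6000]                            # 'build has been published'
    #   BuildList.status_by_name['build error']                 # 666
    #   BuildList.container_status_by_name['publishing error']  # 8000
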
    def get_init_data(self, ID):
        ID = str(ID)
        log.debug(_('Reading buildlist ') + str(ID))
        self.init_data = self.models.jsn.get_buildlist_by_id(ID)
        self.init_data = self.init_data['build_list']

    def load(self):
        self.params_dict = self.init_data.copy()
        self.params_dict['project'] = Project(self.models, init_data=self.params_dict['project'])
        self.params_dict['arch'] = Arch(self.models, init_data=self.params_dict['arch'])
        self.params_dict['save_to_repository'] = Repository(self.models, init_data=self.params_dict['save_to_repository'])
        self.params_dict['build_for_platform'] = Platform(self.models, init_data=self.params_dict['build_for_platform'])

        include_repos = self.params_dict['include_repos']
        self.params_dict['include_repos'] = []
        for rep in include_repos:
            r = Repository(self.models, init_data=rep)
            self.params_dict['include_repos'].append(r)

        extra_repositories = self.params_dict['extra_repositories']
        self.params_dict['extra_repositories'] = []
        for rep in extra_repositories:
            r = Repository(self.models, init_data=rep)
            self.params_dict['extra_repositories'].append(r)

        self.params_dict['owner_type'] = 'Group'
        if 'created_at' in self.init_data:
            self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
        if 'updated_at' in self.init_data:
            self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))

        self.params_dict['chroot_tree'] = ''
        if self.init_data['logs']:
            self.params_dict['log_url'] = self.init_data['logs'][0]['url'] + '.log?show=True'
            # loop variable renamed to avoid shadowing the module-level 'log' logger
            for log_item in self.init_data['logs']:
                if log_item["file_name"] == "chroot-tree.log":
                    self.params_dict["chroot_tree"] = log_item["url"] + '.log?show=True'
        else:
            self.params_dict['log_url'] = ''

        self.params_dict['status_string'] = BuildList.status_by_id[self.params_dict['status']]
        if self.params_dict['status'] in BuildList.final_statuses:
            self.cacher = lt_cache

        self.params_dict['container_status_string'] = BuildList.container_status_by_id[self.params_dict['container_status']]

    def __repr__(self):
        return '%s (%s: %s - %s)' % (self.id, self.project.fullname,
                self.arch.name, self.status_string)

    update_types = ['security', 'bugfix', 'enhancement', 'recommended', 'newpackage']
    external_nodes_vals = ['none', 'everything', 'owned']
    auto_publish_statuses = ['default', 'none', 'testing']

    @staticmethod
    def new_build_task(models,
                       project,
                       save_to_repository,
                       repositories,
                       commit_hash,
                       project_version,
                       update_type,
                       auto_publish_status,
                       arches, skip_personal,
                       cached_chroot,
                       save_chroot,
                       auto_create_container,
                       include_testing_subrepo,
                       use_extra_tests,
                       extra_build_lists,
                       external_nodes):
        DATA = {
            'project_id': project.id,
            'commit_hash': commit_hash,
            'update_type': update_type,
            'save_to_repository_id': save_to_repository.id,
            'build_for_platform_id': None,
            'auto_publish_status': auto_publish_status,
            'project_version': project_version,
            'auto_create_container': auto_create_container,
            'use_cached_chroot': cached_chroot,
            'save_buildroot': save_chroot,
            'arch_id': None,
            'include_repos': [],
            'extra_repositories': [],
            'extra_build_lists': extra_build_lists,
            'include_testing_subrepository': include_testing_subrepo,
            'use_extra_tests': use_extra_tests
        }
        # 'external_nodes' is only sent when an explicit value was requested
        if external_nodes != 'none':
            DATA['external_nodes'] = external_nodes

        build_platforms = {}

        if not skip_personal and string.find(save_to_repository.platform.name, "_personal") > 0:
            DATA['extra_repositories'].append(save_to_repository.id)

        for repo in repositories:
            if repo.platform.name not in build_platforms:
                build_platforms[repo.platform.name] = {}
                build_platforms[repo.platform.name]['repositories'] = []
                build_platforms[repo.platform.name]['id'] = repo.platform.id
            build_platforms[repo.platform.name]['repositories'].append(repo.id)

        build_ids = []
        for bpl in build_platforms:
            DATA['build_for_platform_id'] = build_platforms[bpl]['id']
            DATA['include_repos'] = build_platforms[bpl]['repositories']

            for arch in arches:
                DATA['arch_id'] = arch.id
                log.debug(_('Sending the build task: ') + str(DATA))
                try:
                    result = models.jsn.new_build_task({'build_list': DATA})
                except BadRequestError as ex:
                    log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                        'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
                    exit(1)

                if result['build_list']['id'] is not None:
                    log.info(_("Task %(proj)s | %(plat)s | %(save_repo)s | %(arch)s has been sent. Build task id is %(id)s") %
                        {'proj': project, 'plat': bpl, 'save_repo': save_to_repository, 'arch': arch, 'id': result['build_list']['id']})
                else:
                    log.info(_("Build request %(proj)s | %(plat)s | %(save_repo)s | %(arch)s has failed.\nReason: %(reason)s") %
                        {'proj': project, 'plat': bpl, 'save_repo': save_to_repository, 'arch': arch, 'reason': result['build_list']['message']})
                build_ids.append(result['build_list']['id'])
        return build_ids
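
    # Invocation sketch (argument values are illustrative; the objects come from the models
    # above). One build list is created per (build platform, arch) pair and the new ids are
    # returned:
    #
    #   ids = BuildList.new_build_task(models, project, save_to_repo, [main_repo],
    #           commit_hash, None, 'bugfix', 'default',
    #           [Arch.get_arch_by_name(models, 'x86_64')],
    #           skip_personal=False, cached_chroot=False, save_chroot=False,
    #           auto_create_container=True, include_testing_subrepo=False,
    #           use_extra_tests=False, extra_build_lists=[], external_nodes='none')
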
    def publish(self):
        try:
            log.info(_("Publishing the build list %s...") % self.id)
            result = self.models.jsn.publish(self.id)
            if result['is_published']:
                log.info(result['message'])
            else:
                log.error(result['message'])

            return result
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used.'))
            exit(1)

class PullRequest(Model):
    required_fields = ['title', 'body', 'to_ref', 'from_ref']

    def load(self):
        self.params_dict = self.init_data.copy()
        self.params_dict['project'] = Project(self.models, init_data=self.params_dict['project'])

    def __repr__(self):
        return '%s: %s - %s' % (self.title, self.to_ref,
                self.from_ref)

    @staticmethod
    def new_pull_request(models, project, dest_project, title, body, to_ref, from_ref):
        DATA = {
            'from_project_id': project.id,
            'title': title,
            'body': body,
            'to_ref': to_ref,
            'from_ref': from_ref,
        }
        log.debug(_('Sending pull request: ') + str(DATA))
        try:
            result = models.jsn.new_pull_request({'pull_request': DATA}, dest_project.id)
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("Pull request from %(proj)s/%(from)s to %(dest_proj)s/%(to)s has been sent.") % {'proj': project, 'from': from_ref, 'to': to_ref, 'dest_proj': dest_project})

class ProjectCreator(Model):
    required_fields = ['name', 'description', 'owner']

    def load(self):
        self.params_dict = self.init_data.copy()
        self.params_dict['project'] = Project(self.models, init_data=self.params_dict['project'])

    def __repr__(self):
        return '%s (%s)' % (self.name, self.owner)

    @staticmethod
    def new_project(models, name, description, owner_id, owner_type, visibility='public'):
        # The WebUI and the API use different terms for visibility: Public/Private vs open/hidden
        if visibility == "public":
            vis = "open"
        else:
            vis = "hidden"
        DATA = {
            'name': name,
            'owner_id': owner_id,
            'owner_type': owner_type,
            'visibility': vis,
            'description': description,
            'is_package': 'true',
            'default_branch': 'master',
            'has_issues': 'true',
            'has_wiki': 'false',
        }
        log.debug(_('Creating project: ') + str(DATA))
        try:
            result = models.jsn.new_project({'project': DATA})
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("The project %(name)s for owner %(owner)d has been created.") % {'name': name, 'owner': owner_id})
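
    # Usage sketch (owner id and names are hypothetical): create a package project owned by
    # a group with id 42:
    #
    #   ProjectCreator.new_project(models, 'foo', 'Foo package', 42, 'Group', visibility='public')
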
    @staticmethod
    def add_project_to_repo(models, repo_id, project_id):
        DATA = {
            'project_id': project_id,
        }
        log.debug(_('Adding project to repository: ') + str(DATA))
        try:
            result = models.jsn.add_project_to_repo(DATA, repo_id)
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("The project %(project)d has been added to repository %(repo)d.") % {'project': project_id, 'repo': repo_id})
        # Would be nice to invalidate only the record corresponding to our project...
        models.clear_cache()

    @staticmethod
    def remove_project_from_repo(models, repo_id, project_id):
        DATA = {
            'project_id': project_id,
        }
        log.debug(_('Removing project from repository: ') + str(DATA))
        try:
            result = models.jsn.remove_project_from_repo(DATA, repo_id)
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("The project has been removed from the repository."))
        # Would be nice to invalidate only the record corresponding to our project...
        models.clear_cache()

    @staticmethod
    def fork_project(models, proj_id, owner_id, target_name):
        if owner_id > 0:
            DATA = {
                'fork_name': target_name,
                'group_id': owner_id,
            }
        else:
            DATA = {
                'fork_name': target_name,
            }
        log.debug(_('Forking project: ') + str(DATA))
        try:
            result = models.jsn.fork_project(DATA, proj_id)
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("The project has been forked."))

    @staticmethod
    def alias_project(models, proj_id, owner_id, target_name):
        if owner_id > 0:
            DATA = {
                'fork_name': target_name,
                'group_id': owner_id,
            }
        else:
            DATA = {
                'fork_name': target_name,
            }
        log.debug(_('Creating an alias for a project: ') + str(DATA))
        try:
            result = models.jsn.alias_project(DATA, proj_id)
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("The project alias has been created."))

    @staticmethod
    def destroy_project(models, proj_id):
        DATA = {
            'id': proj_id,
        }
        log.debug(_('Destroying project: ') + str(proj_id))
        try:
            result = models.jsn.destroy_project(DATA, proj_id)
        except BadRequestError as ex:
            log.error(_('Sorry, something went wrong and the request sent to ABF was invalid. Please '
                'notify the console-client developers and send them the command-line arguments used and the request data:\n%s') % DATA)
            exit(1)
        log.info(_("The project has been destroyed."))

class Models(object):
    _instance = {}

    def __new__(cls, abf_url, file_store_url, login, password, *args, **kwargs):
        tmp = '%s:%s:%s:%s' % (abf_url, file_store_url, login, password)
        if tmp not in cls._instance:
            # object.__new__ takes no extra arguments; __init__ receives them instead
            cls._instance[tmp] = super(Models, cls).__new__(cls)
        return cls._instance[tmp]

    #properties_by_class = {Platform: 'platforms', Repository: 'repositories', Arch: 'arches',
    #            BuildList: 'buildlists', Project: 'projects'}
    #properties_by_name = dict([(properties_by_class[x], x) for x in properties_by_class])

    def __init__(self, abf_url, file_store_url, login, password):
        log.debug(_('Initializing models for ') + abf_url)
        self.abf_url = abf_url
        self.file_store_url = file_store_url
        self.login = login
        self.password = password
        self.jsn = AbfJson(abf_url, file_store_url, login, password, log)

    @staticmethod
    def clear_cache():
        lt_cache.clear()
        st_cache.clear()
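
# Bootstrapping sketch (URLs and credentials are placeholders): Models is a per-connection
# singleton keyed by (abf_url, file_store_url, login, password), so constructing it twice
# with the same arguments returns the same object.
#
#   models = Models('https://abf.example.org', 'https://file-store.example.org',
#                   'user', 'password')
#   platforms = Platform.get_build_platforms(models)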