signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def progression_linear(week, start_weight, final_weight, start_week, end_week):
    """Evaluate in ``week`` the straight line through the points
    (``start_week``, ``start_weight``) and (``end_week``, ``final_weight``).

    Parameters
    ----------
    week : number
        The week at which to evaluate the linear progression.
    start_weight, final_weight : number
        Weights at ``start_week`` and ``end_week`` respectively.
    start_week, end_week : number
        The two weeks defining the line (must differ, or a
        ZeroDivisionError is raised).

    Returns
    -------
    number
        The interpolated (or extrapolated) weight at ``week``.
    """
    # Rise over run between the two anchor points.
    gradient = (final_weight - start_weight) / (end_week - start_week)
    # Point-slope form anchored at (start_week, start_weight).
    return start_weight + gradient * (week - start_week)
def _replace_image ( image_url , image_tag , ebook_folder , image_name = None ) :
"""Replaces the src of an image to link to the local copy in the images folder of the ebook . Tightly coupled with bs4
package .
Args :
image _ url ( str ) : The url of the image .
image _ tag ( bs4 . element . Tag ) : The bs4... | try :
assert isinstance ( image_tag , bs4 . element . Tag )
except AssertionError :
raise TypeError ( "image_tag cannot be of type " + str ( type ( image_tag ) ) )
if image_name is None :
image_name = str ( uuid . uuid4 ( ) )
try :
image_full_path = os . path . join ( ebook_folder , 'images' )
asser... |
def multi_layer_feature ( body , from_layers , num_filters , strides , pads , min_filter = 128 ) :
"""Wrapper function to extract features from base network , attaching extra
layers and SSD specific layers
Parameters
from _ layers : list of str
feature extraction layers , use ' ' for add extra layers
For ... | # arguments check
assert len ( from_layers ) > 0
assert isinstance ( from_layers [ 0 ] , str ) and len ( from_layers [ 0 ] . strip ( ) ) > 0
assert len ( from_layers ) == len ( num_filters ) == len ( strides ) == len ( pads )
internals = body . get_internals ( )
layers = [ ]
for k , params in enumerate ( zip ( from_lay... |
def cell_styles ( self ) :
"""dict of { ( row name , col name ) : style }""" | styles = { }
for colname , col in self . dataframe . items ( ) :
for rowname , value in col . items ( ) :
if isinstance ( value , Value ) and value . style is not None :
style = value . style
if not isinstance ( style , CellStyle ) :
style = self . _named_styles [ sty... |
def get_error_probability(self):
    """Combined per-base error probability.

    Combines the observable and unobservable error sources as
    ``p_obs + (1 - p_obs) * p_unobs``: the unobserved errors only
    contribute on the fraction not already explained by observed ones.

    :returns: combined error probability (in [0, 1] assuming both
        sources return values in [0, 1])
    """
    p_observed = self._observable.get_error_probability()
    p_unobserved = self._unobservable.get_error_probability()
    # Weight the unobservable part by the probability that no observed
    # error occurred.
    return p_observed + (1 - p_observed) * p_unobserved
def add_reorganize_data ( self , name , input_name , output_name , mode = 'SPACE_TO_DEPTH' , block_size = 2 ) :
"""Add a data reorganization layer of type " SPACE _ TO _ DEPTH " or " DEPTH _ TO _ SPACE " .
Parameters
name : str
The name of this layer .
input _ name : str
The input blob name of this layer ... | spec = self . spec
nn_spec = self . nn_spec
# Add a new layer
spec_layer = nn_spec . layers . add ( )
spec_layer . name = name
spec_layer . input . append ( input_name )
spec_layer . output . append ( output_name )
spec_layer_params = spec_layer . reorganizeData
# Set the parameters
if block_size < 2 :
raise ValueE... |
def connect_widget ( self , wid , getter = None , setter = None , signal = None , arg = None , update = True , flavour = None ) :
"""Finish set - up by connecting the widget . The model was already
specified in the constructor .
* wid * is a widget instance .
* getter * is a callable . It is passed * wid * an... | if wid in self . _wid_info :
raise ValueError ( "Widget " + str ( wid ) + " was already connected" )
wid_type = None
if None in ( getter , setter , signal ) :
w = search_adapter_info ( wid , flavour )
if getter is None :
getter = w [ GETTER ]
if setter is None :
setter = w [ SETTER ]
... |
def get_state_search_path_list(saltenv='base'):
    '''For the state file system, return a list of paths to search for states.

    The returned list currently contains a single entry:
    ``<cachedir>/files/<saltenv>``.

    :param saltenv: the salt environment whose cached files to search
    :return: list of directory paths
    '''
    # NOTE: the state cache should be updated before running this method.
    search_list = []
    cachedir = __opts__.get('cachedir', None)
    log.info("Searching for files in saltenv: %s", saltenv)
    # os.path.join is the portable way to build the path (the original
    # concatenated strings with os.sep by hand).
    path = os.path.join(cachedir, "files", saltenv)
    search_list.append(path)
    return search_list
def update_ids ( self , docids ) :
"""Update id - > pos mapping with new document ids .""" | logger . info ( "updating %i id mappings" % len ( docids ) )
for docid in docids :
if docid is not None :
pos = self . id2pos . get ( docid , None )
if pos is not None :
logger . info ( "replacing existing document %r in %s" % ( docid , self ) )
del self . pos2id [ pos ]
... |
def background_bin_from_string ( background_bins , data ) :
"""Return template ids for each bin as defined by the format string
Parameters
bins : list of strings
List of strings which define how a background bin is taken from the
list of templates .
data : dict of numpy . ndarrays
Dict with parameter ke... | used = numpy . array ( [ ] , dtype = numpy . uint32 )
bins = { }
for mbin in background_bins :
name , bin_type , boundary = tuple ( mbin . split ( ':' ) )
if boundary [ 0 : 2 ] == 'lt' :
member_func = lambda vals , bd = boundary : vals < float ( bd [ 2 : ] )
elif boundary [ 0 : 2 ] == 'gt' :
... |
def _docstring_key ( self , line ) :
"""Returns the key to use for the docblock immediately preceding
the specified line .""" | decormatch = self . docparser . RE_DECOR . match ( line )
if decormatch is not None :
key = "{}.{}" . format ( self . docelement . name , decormatch . group ( "name" ) )
else :
key = self . element . name
return key |
def print_tweets ( tweets ) :
"""Print a list of tweets one by one separated by " = " s .
Parameters
tweets : list ( dict )
A list of tweets . Each tweet is a dict containing the username of the tweet ' s author ,
the post time , and the tweet body .""" | print ( '=' * 60 )
for index , tweet in enumerate ( tweets ) :
print ( '-' * 60 )
print ( 'Tweet {}:' . format ( index ) )
print ( 'Username:' , tweet [ pytwis_constants . USERNAME_KEY ] )
print ( 'Time:' , datetime . datetime . fromtimestamp ( int ( tweet [ pytwis_constants . TWEET_UNIXTIME_KEY ] ) ) .... |
def send_metrics_to_cloudwatch ( self , rule , metric , dimensions ) :
"""Send metrics to CloudWatch for the given dimensions""" | timestamp = datetime . datetime . utcfromtimestamp ( metric . timestamp )
self . log . debug ( "CloudWatch: Attempting to publish metric: %s to %s " "with value (%s) for dimensions %s @%s" , rule [ 'name' ] , rule [ 'namespace' ] , str ( metric . value ) , str ( dimensions ) , str ( metric . timestamp ) )
try :
sel... |
def parse_signature ( self , statement , element , module = None ) :
"""Parses the specified line as a new version of the signature for ' element ' .
: arg statement : the string that has the new signature .
: arg element : the code element whose signature will be changed .""" | # If the signature changes , the user might not have had a chance to add the
# detailed member information for it yet . Here
# we will just update the modifiers and attributes . Also , since all the mods
# etc . will be overwritten , we don ' t need to handle replace separately .
smatch = self . RE_SIG . match ( statem... |
def compute_results ( self , voting_method , votes = None , winners = 1 , ** kwargs ) :
"""Compute voting results to decide the winner ( s ) from the
: attr : ` votes ` .
The votes should have been made for the current
: attr : ` ~ creamas . vote . VoteOrganizer . candidates ` .
: param voting _ method :
... | if votes is None :
votes = self . votes
if len ( votes ) == 0 :
self . _log ( logging . DEBUG , "Could not compute results as there are " "no votes!" )
return [ ]
self . _log ( logging . DEBUG , "Computing results from {} votes." . format ( len ( votes ) ) )
return voting_method ( self . candidates , votes ... |
def reporter ( self ) :
"""Creates a report of the results""" | logging . info ( 'Creating {} report' . format ( self . analysistype ) )
# Create the path in which the reports are stored
make_path ( self . reportpath )
header = 'Strain,Serotype\n'
data = ''
with open ( os . path . join ( self . reportpath , '{}.csv' . format ( self . analysistype ) ) , 'w' ) as report :
for sam... |
def _create_config_signature ( config ) :
"""return the signature for a config object .
The signature is computed as sha1 digest of the contents of
working _ directory , include _ paths , define _ symbols and
undefine _ symbols .
: param config : Configuration object
: type config : : class : ` parser . x... | m = hashlib . sha1 ( )
m . update ( config . working_directory . encode ( "utf-8" ) )
for p in config . include_paths :
m . update ( p . encode ( "utf-8" ) )
for p in config . define_symbols :
m . update ( p . encode ( "utf-8" ) )
for p in config . undefine_symbols :
m . update ( p . encode ( "utf-8" ) )
fo... |
def query_most_pic(num, kind='1'):
    '''Query most-viewed pics: the ``num`` posts of ``kind`` that have a
    non-empty logo, ordered by descending view count.'''
    condition = (TabPost.kind == kind) & (TabPost.logo != "")
    most_viewed = TabPost.select().where(condition)
    return most_viewed.order_by(TabPost.view_count.desc()).limit(num)
def save_object(self, obj):
    """Save a Django object and register the change with Discipline.

    Saves ``obj`` (whether or not it already exists) and then records the
    change via the module-level ``save_object`` helper, creating a new
    Action object.  Use this instead of ``obj.save()``.
    """
    obj.save()
    # Best-effort registration: a DisciplineException here is deliberately
    # swallowed so the save itself still succeeds.
    try:
        save_object(obj, editor=self)
    except DisciplineException:
        pass
def get_daily(self, date=None):
    """Get time entries for a date (defaults to today).

    :param date: a ``datetime.date``-like object with ``year``, ``month``
        and ``day`` attributes, or ``None`` for today's entries.
    :return: whatever ``self.get`` returns for the daily endpoint.
    """
    # Identity check instead of ``== None`` (PEP 8; also avoids invoking
    # a custom __eq__ on date-like objects).
    if date is None:
        return self.get("/daily.json")
    url = "/daily/{}/{}/{}.json".format(date.year, date.month, date.day)
    return self.get(url)
def _author_uid_get ( post ) :
"""Get the UID of the post author .
: param Post post : The post object to determine authorship of
: return : Author UID
: rtype : str""" | u = post . meta ( 'author.uid' )
return u if u else str ( current_user . uid ) |
def shorten_text(self, text):
    """Shorten ``text`` to fit into :attr:`width`.

    Text that already fits is returned unchanged; otherwise it is
    truncated and suffixed with ``'...'`` so the result is exactly
    ``self.width`` characters long.
    """
    if len(text) <= self.width:
        return text
    return text[:self.width - 3] + '...'
def insert_asm(self, addr, asm_code, before_label=False):
    """Insert some assembly code at the specified address.

    There must be an instruction starting at that address.  The snippet
    is queued either before or after the label at ``addr`` depending on
    ``before_label``.

    :param int addr: Address of insertion
    :param str asm_code: The assembly code to insert
    :param bool before_label: queue the code before the label instead of after
    :return: None
    """
    target = (self._inserted_asm_before_label if before_label
              else self._inserted_asm_after_label)
    target[addr].append(asm_code)
def profile_validation(self, status):
    """Record one validation result on the selected profile.

    Increments ``validation_pass_count`` when ``status`` is truthy,
    otherwise ``validation_fail_count``.  Both counters are created at 0
    if missing, so consumers can rely on both keys being present.

    :param status: truthy for a passing validation, falsy for a failure
    :return: None
    """
    data = self.selected_profile.data
    # Ensure both counters exist regardless of which one is incremented.
    data.setdefault('validation_pass_count', 0)
    data.setdefault('validation_fail_count', 0)
    key = 'validation_pass_count' if status else 'validation_fail_count'
    data[key] += 1
def _from_dict ( cls , _dict ) :
"""Initialize a DocStructure object from a json dictionary .""" | args = { }
if 'section_titles' in _dict :
args [ 'section_titles' ] = [ SectionTitles . _from_dict ( x ) for x in ( _dict . get ( 'section_titles' ) ) ]
if 'leading_sentences' in _dict :
args [ 'leading_sentences' ] = [ LeadingSentence . _from_dict ( x ) for x in ( _dict . get ( 'leading_sentences' ) ) ]
return... |
def items ( self , folder_id , subfolder_id , ann_id = None ) :
'''Yields an unodered generator of items in a subfolder .
The generator yields items , which are represented by a tuple
of ` ` content _ id ` ` and ` ` subtopic _ id ` ` . The format of these
identifiers is unspecified .
By default ( with ` ` a... | self . assert_valid_folder_id ( folder_id )
self . assert_valid_folder_id ( subfolder_id )
ann_id = self . _annotator ( ann_id )
folder_cid = self . wrap_folder_content_id ( ann_id , folder_id )
subfolder_sid = self . wrap_subfolder_subtopic_id ( subfolder_id )
ident = ( folder_cid , subfolder_sid )
if self . store . g... |
def check ( self , func = None , name = None ) :
"""A decorator to register a new Dockerflow check to be run
when the / _ _ heartbeat _ _ endpoint is called . , e . g . : :
from dockerflow . flask import checks
@ dockerflow . check
def storage _ reachable ( ) :
try :
acme . storage . ping ( )
except S... | if func is None :
return functools . partial ( self . check , name = name )
if name is None :
name = func . __name__
self . logger . info ( 'Registered Dockerflow check %s' , name )
@ functools . wraps ( func )
def decorated_function ( * args , ** kwargs ) :
self . logger . info ( 'Called Dockerflow check %... |
def _is_qstring ( message ) :
"""Check if its a QString without adding any dep to PyQt5.""" | my_class = str ( message . __class__ )
my_class_name = my_class . replace ( '<class \'' , '' ) . replace ( '\'>' , '' )
if my_class_name == 'PyQt5.QtCore.QString' :
return True
return False |
def ne(self, other, axis="columns", level=None):
    """Element-wise "not equal" comparison of this frame against ``other``.

    Args:
        other: A DataFrame or Series or scalar to compare to.
        axis: The axis to perform the ne over.
        level: The Multilevel index level to apply ne over.

    Returns:
        A new DataFrame of booleans (whatever ``_binary_op`` produces).
    """
    # Delegate to the shared binary-operation dispatcher.
    return self._binary_op("ne", other, axis=axis, level=level)
def set_autocamera(self, mode='density'):
    """Let the Camera choose its own parameters for the current scene.

    By default Scene defines its own Camera, but there is no general way
    of picking a point of view; the Camera positions itself according to
    the given criterion (``mode``, 'density' by default), after which the
    scene is recomputed for the new viewpoint.
    """
    # Delegate the choice of camera parameters to the camera itself.
    self.Camera.set_autocamera(self._Particles, mode=mode)
    self._camera_params = self.Camera.get_params()
    # Recompute the scene for the new point of view; presumably yields
    # projected coordinates, smoothing lengths and a particle-index view
    # -- TODO confirm against __compute_scene.
    self._x, self._y, self._hsml, self._kview = self.__compute_scene()
    # Masses of only the particles selected by the computed view.
    self._m = self._Particles._mass[self._kview]
def cat ( args ) :
"""% prog cat * . pdf - o output . pdf
Concatenate pages from pdf files into a single pdf file .
Page ranges refer to the previously - named file .
A file not followed by a page range means all the pages of the file .
PAGE RANGES are like Python slices .
{ page _ range _ help }
EXAMPL... | p = OptionParser ( cat . __doc__ . format ( page_range_help = PAGE_RANGE_HELP ) )
p . add_option ( "--nosort" , default = False , action = "store_true" , help = "Do not sort file names" )
p . add_option ( "--cleanup" , default = False , action = "store_true" , help = "Remove individual pdfs after merging" )
p . set_out... |
def is_binary(var, allow_none=False):
    """Return True if ``var`` is a binary (bytes) object.

    Result                      py-2    py-3
    b'bytes literal'            True    True
    'string literal'            True    False
    u'unicode literal'          False   False

    Also works with the corresponding numpy types (``numpy.bytes_`` is a
    subclass of ``bytes``).

    :param var: object to test
    :param allow_none: when True, ``None`` also counts as binary
    """
    # ``six.binary_type`` is exactly the builtin ``bytes`` on Python 3
    # (and ``str`` == ``bytes`` on Python 2), so the builtin drops the
    # third-party dependency with identical behavior.
    return isinstance(var, bytes) or (var is None and allow_none)
def confidence_intervals ( self , X , width = .95 , quantiles = None ) :
"""estimate confidence intervals for the model .
Parameters
X : array - like of shape ( n _ samples , m _ features )
Input data matrix
width : float on [ 0,1 ] , optional
quantiles : array - like of floats in ( 0 , 1 ) , optional
I... | if not self . _is_fitted :
raise AttributeError ( 'GAM has not been fitted. Call fit first.' )
X = check_X ( X , n_feats = self . statistics_ [ 'm_features' ] , edge_knots = self . edge_knots_ , dtypes = self . dtype , features = self . feature , verbose = self . verbose )
return self . _get_quantiles ( X , width ,... |
def check_common_elements_order(list1, list2):
    """Verify that the elements common to both lists appear in the same
    relative order in each.

    Examples:
        check_common_elements_order(['red', 'green', 'black', 'orange'],
                                    ['red', 'pink', 'green', 'white', 'black'])
        -> True
    """
    common = set(list1).intersection(list2)
    first_filtered = [element for element in list1 if element in common]
    second_filtered = [element for element in list2 if element in common]
    return first_filtered == second_filtered
def get_quant_NAs(quantdata, quantheader):
    """Fill missing quant keys with 'NA'.

    Takes quantdata in a dict and a header with quantkeys (e.g. iTRAQ
    isotopes).  Returns a dict of quant intensities with any key missing
    from ``quantdata`` set to the string 'NA'.
    """
    return {qkey: quantdata.get(qkey, 'NA') for qkey in quantheader}
def components(self, extra_params=None):
    """All components in this Space."""
    rel_path = self._build_rel_path('ticket_components')
    return self.api._get_json(
        Component,
        space=self,
        rel_path=rel_path,
        extra_params=extra_params,
    )
def infer_modifications ( stmts ) :
"""Return inferred Modification from RegulateActivity + ActiveForm .
This function looks for combinations of Activation / Inhibition Statements
and ActiveForm Statements that imply a Modification Statement .
For example , if we know that A activates B , and phosphorylated B... | linked_stmts = [ ]
for act_stmt in _get_statements_by_type ( stmts , RegulateActivity ) :
for af_stmt in _get_statements_by_type ( stmts , ActiveForm ) :
if not af_stmt . agent . entity_matches ( act_stmt . obj ) :
continue
mods = af_stmt . agent . mods
# Make sure the ActiveForm... |
def AddSpecification ( self , specification ) :
"""Adds a format specification .
Args :
specification ( FormatSpecification ) : format specification .
Raises :
KeyError : if the store already contains a specification with
the same identifier .""" | if specification . identifier in self . _format_specifications :
raise KeyError ( 'Format specification {0:s} is already defined in store.' . format ( specification . identifier ) )
self . _format_specifications [ specification . identifier ] = specification
for signature in specification . signatures :
signatu... |
def parse_string ( self , string ) :
"""Parse ASCII output of JPrintMeta""" | self . log . info ( "Parsing ASCII data" )
if not string :
self . log . warning ( "Empty metadata" )
return
lines = string . splitlines ( )
application_data = [ ]
application = lines [ 0 ] . split ( ) [ 0 ]
self . log . debug ( "Reading meta information for '%s'" % application )
for line in lines :
if appli... |
def models(self):
    """Return all the models defined for this module."""
    # The dotted module path looks like ``<...>.<app>.<module>``; the app
    # label is the second-to-last component.
    app_label = self.__class__.__module__.split('.')[-2]
    return get_models(get_app(app_label))
def _append ( self , menu ) :
'''append this menu item to a menu''' | menu . AppendCheckItem ( self . id ( ) , self . name , self . description )
menu . Check ( self . id ( ) , self . checked ) |
def split_by_idxs(seq, idxs):
    '''A generator yielding pieces of ``seq``, separated at the indexes
    specified in ``idxs``.'''
    start = 0
    for cut in idxs:
        # Mirror normal indexing rules: allow negative indexes but reject
        # anything outside the sequence.
        if not (-len(seq) <= cut < len(seq)):
            raise KeyError(f'Idx {cut} is out-of-bounds')
        yield seq[start:cut]
        start = cut
    yield seq[start:]
def cmd_relay ( self , args ) :
'''set relays''' | if len ( args ) == 0 or args [ 0 ] not in [ 'set' , 'repeat' ] :
print ( "Usage: relay <set|repeat>" )
return
if args [ 0 ] == "set" :
if len ( args ) < 3 :
print ( "Usage: relay set <RELAY_NUM> <0|1>" )
return
self . master . mav . command_long_send ( self . target_system , self . targe... |
def create_doc_id_from_json(doc) -> str:
    """Docs with identical contents get the same ID.

    Args:
        doc: any JSON-serializable document.

    Returns: a string with the SHA-256 hex digest of the canonically
    serialized (sorted-keys) document.
    """
    canonical = json.dumps(doc, sort_keys=True)
    return hashlib.sha256(canonical.encode('utf-8')).hexdigest()
def asDirect ( self ) :
"""Returns the image data as a direct representation of an
` ` x * y * planes ` ` array . This method is intended to remove the
need for callers to deal with palettes and transparency
themselves . Images with a palette ( colour type 3)
are converted to RGB or RGBA ; images with trans... | self . preamble ( )
# Simple case , no conversion necessary .
if not self . colormap and not self . trns and not self . sbit :
return self . read ( )
x , y , pixels , meta = self . read ( )
if self . colormap :
meta [ 'colormap' ] = False
meta [ 'alpha' ] = bool ( self . trns )
meta [ 'bitdepth' ] = 8
... |
def ordering_step(self, oneway=False):
    """Iterator that computes all vertices ordering in their layers
    (one layer after the other from top to bottom, then back up to the
    top again unless ``oneway`` is True).

    Yields ``(layer, mvmt)`` pairs, where ``mvmt`` is the movement count
    returned by each layer's ``order()`` call.
    """
    # Downward sweep: dirv = -1 marks the top-to-bottom direction.
    self.dirv = -1
    crossings = 0
    for l in self.layers:
        mvmt = l.order()
        crossings += mvmt
        yield (l, mvmt)
    # Stop after one pass if requested, or if the downward sweep produced
    # no movement at all (ordering already stable).
    if oneway or (crossings == 0):
        return
    # Upward sweep: reuse the last layer from the loop above and walk
    # back via nextlayer() -- presumably nextlayer() follows self.dirv,
    # now +1 -- TODO confirm against the layer class.
    self.dirv = +1
    while l:
        mvmt = l.order()
        yield (l, mvmt)
        l = l.nextlayer()
def _AddMessageMethods ( message_descriptor , cls ) :
"""Adds implementations of all Message methods to cls .""" | _AddListFieldsMethod ( message_descriptor , cls )
_AddHasFieldMethod ( message_descriptor , cls )
_AddClearFieldMethod ( message_descriptor , cls )
if message_descriptor . is_extendable :
_AddClearExtensionMethod ( cls )
_AddHasExtensionMethod ( cls )
_AddEqualsMethod ( message_descriptor , cls )
_AddStrMethod ... |
def info(self, section='default'):
    """Get information and statistics about the server.

    If called without argument will return the default set of sections.
    For available sections, see http://redis.io/commands/INFO

    :raises ValueError: if section is invalid (empty/falsy)
    """
    if not section:
        raise ValueError("invalid section")
    # Issue the INFO command and convert the raw payload once the reply
    # future resolves.
    reply = self.execute(b'INFO', section, encoding='utf-8')
    return wait_convert(reply, parse_info)
async def is_ready ( self ) :
"""Check if the multi - environment has been fully initialized .
This calls each slave environment managers ' : py : meth : ` is _ ready ` and
checks if the multi - environment itself is ready by calling
: py : meth : ` ~ creamas . mp . MultiEnvironment . check _ ready ` .
. . ... | async def slave_task ( addr , timeout ) :
try :
r_manager = await self . env . connect ( addr , timeout = timeout )
ready = await r_manager . is_ready ( )
if not ready :
return False
except :
return False
return True
if not self . env . is_ready ( ) :
return F... |
def _prepare_sort_options ( self , has_pk ) :
"""Prepare sort options for _ values attributes .
If we manager sort by score after getting the result , we do not want to
get values from the first sort call , but only from the last one , after
converting results in zset into keys""" | sort_options = super ( ExtendedCollectionManager , self ) . _prepare_sort_options ( has_pk )
if self . _values : # if we asked for values , we have to use the redis ' sort '
# command , which is able to return other fields .
if not sort_options :
sort_options = { }
sort_options [ 'get' ] = self . _value... |
def calc_2d_ellipse_properties ( cov , nstd = 2 ) :
"""Calculate the properties for 2d ellipse given the covariance matrix .""" | def eigsorted ( cov ) :
vals , vecs = np . linalg . eigh ( cov )
order = vals . argsort ( ) [ : : - 1 ]
return vals [ order ] , vecs [ : , order ]
vals , vecs = eigsorted ( cov )
width , height = 2 * nstd * np . sqrt ( vals [ : 2 ] )
normal = vecs [ : , 2 ] if vecs [ 2 , 2 ] > 0 else - vecs [ : , 2 ]
d = np... |
def to_java_doubles ( m ) :
'''to _ java _ doubles ( m ) yields a java array object for the vector or matrix m .''' | global _java
if _java is None :
_init_registration ( )
m = np . asarray ( m )
dims = len ( m . shape )
if dims > 2 :
raise ValueError ( '1D and 2D arrays supported only' )
bindat = serialize_numpy ( m , 'd' )
return ( _java . jvm . nben . util . Numpy . double2FromBytes ( bindat ) if dims == 2 else _java . jvm ... |
def build_pdf ( source , texinputs = [ ] , builder = None ) :
"""Builds a LaTeX source to PDF .
Will automatically instantiate an available builder ( or raise a
: class : ` exceptions . RuntimeError ` if none are available ) and build the
supplied source with it .
Parameters are passed on to the builder ' s... | if builder is None :
builders = PREFERRED_BUILDERS
elif builder not in BUILDERS :
raise RuntimeError ( 'Invalid Builder specified' )
else :
builders = ( builder , )
for bld in builders :
bld_cls = BUILDERS [ bld ]
builder = bld_cls ( )
if not builder . is_available ( ) :
continue
ret... |
def stop_containers(self):
    """Stop (kill) all containers used by this instance of the backend."""
    while self._containers:
        container = self._containers.pop()
        try:
            container.kill(signal.SIGKILL)
        except docker.errors.APIError:
            # The container probably doesn't exist anymore; ignore.
            pass
def ckgpav ( inst , sclkdp , tol , ref ) :
"""Get pointing ( attitude ) and angular velocity
for a specified spacecraft clock time .
http : / / naif . jpl . nasa . gov / pub / naif / toolkit _ docs / C / cspice / ckgpav _ c . html
: param inst : NAIF ID of instrument , spacecraft , or structure .
: type ins... | inst = ctypes . c_int ( inst )
sclkdp = ctypes . c_double ( sclkdp )
tol = ctypes . c_double ( tol )
ref = stypes . stringToCharP ( ref )
cmat = stypes . emptyDoubleMatrix ( )
av = stypes . emptyDoubleVector ( 3 )
clkout = ctypes . c_double ( )
found = ctypes . c_int ( )
libspice . ckgpav_c ( inst , sclkdp , tol , ref ... |
def gen_locustfile ( testcase_file_path ) :
"""generate locustfile from template .""" | locustfile_path = 'locustfile.py'
template_path = os . path . join ( os . path . dirname ( os . path . realpath ( __file__ ) ) , "templates" , "locustfile_template" )
with io . open ( template_path , encoding = 'utf-8' ) as template :
with io . open ( locustfile_path , 'w' , encoding = 'utf-8' ) as locustfile :
... |
def route(regex, method, name):
    """Route the decorated view.

    :param regex: A string describing a regular expression to which the
        request path will be matched.
    :param method: A string describing the HTTP method that this view accepts.
    :param name: A string describing the name of the URL pattern.
    """
    def decorator(view_func):
        # Record the routing info on the view; functools.wraps copies the
        # function __dict__, so the wrapper carries ``route`` too.
        view_func.route = routes.route(
            regex=regex, view=view_func.__name__, method=method, name=name)

        @wraps(view_func)
        def wrapper(self, *args, **kwargs):
            return view_func(self, *args, **kwargs)
        return wrapper
    return decorator
def next ( self ) :
"""Pops and returns the first outgoing message from the list .
If message list currently has no messages , the calling thread will
be put to sleep until we have at - least one message in the list that
can be popped and returned .""" | # We pick the first outgoing available and send it .
outgoing_msg = self . outgoing_msg_list . pop_first ( )
# If we do not have any outgoing msg . , we wait .
if outgoing_msg is None :
self . outgoing_msg_event . clear ( )
self . outgoing_msg_event . wait ( )
outgoing_msg = self . outgoing_msg_list . pop_f... |
def download ( URLs , dest_dir = '.' , dest_file = None , decompress = False , max_jobs = 5 ) :
'''Download files from specified URL , which should be space , tab or
newline separated URLs . The files will be downloaded to specified
destination . If ` filename . md5 ` files are downloaded , they are used to
v... | if env . config [ 'run_mode' ] == 'dryrun' :
print ( f'HINT: download\n{URLs}\n' )
return None
if isinstance ( URLs , str ) :
urls = [ x . strip ( ) for x in URLs . split ( ) if x . strip ( ) ]
else :
urls = list ( URLs )
if not urls :
env . logger . debug ( f'No download URL specified: {URLs}' )
... |
def main(github_token, github_api_url, progress):
    """A CLI to easily manage GitHub releases, assets and references."""
    global progress_reporter_cls, _github_token_cli_arg, _github_api_url
    # Only report progress when attached to a TTY and the flag is set.
    progress_reporter_cls.reportProgress = sys.stdout.isatty() and progress
    if progress_reporter_cls.reportProgress:
        progress_reporter_cls = _progress_bar
    # Stash the CLI arguments in module globals for later use.
    _github_token_cli_arg = github_token
    _github_api_url = github_api_url
def get_max_muO2 ( self , min_voltage = None , max_voltage = None ) :
"""Maximum critical oxygen chemical potential along path .
Args :
min _ voltage : The minimum allowable voltage .
max _ voltage : The maximum allowable voltage .
Returns :
Maximum critical oxygen chemical of all compounds along the
in... | data = [ ]
for pair in self . _select_in_voltage_range ( min_voltage , max_voltage ) :
if pair . muO2_discharge is not None :
data . extend ( [ d [ 'chempot' ] for d in pair . muO2_discharge ] )
if pair . muO2_charge is not None :
data . extend ( [ d [ 'chempot' ] for d in pair . muO2_discharge ... |
def get_residuals(ds, m):
    """Using the dataset and model object, calculate and return the
    residuals (observed spectra minus model spectra).

    Parameters
    ----------
    ds : dataset object
    m : model object

    Return
    ------
    residuals : array of residuals, spec minus model spec
    """
    return ds.test_flux - get_model_spectra(ds, m)
def next_builder(self):
    """Create a new builder based off of this one with its sequence number
    incremented.

    :return: A new Builder instance
    :rtype: :class:`Builder`
    """
    clone = Builder(
        horizon_uri=self.horizon.horizon_uri,
        address=self.address,
        network=self.network,
        sequence=self.sequence + 1,
        fee=self.fee,
    )
    # Carry the signing keypair over to the new builder.
    clone.keypair = self.keypair
    return clone
def get_default_config(self):
    """Return the default collector settings."""
    # Start from the parent collector's defaults and override the path.
    config = super(NetstatCollector, self).get_default_config()
    config['path'] = 'netstat'
    return config
def render_word(self, text, tag, i):
    """Render individual word.

    text (unicode): Word text.
    tag (unicode): Part-of-speech tag.
    i (int): Unique ID, typically word index.
    RETURNS (unicode): Rendered SVG markup.
    """
    y = self.offset_y + self.word_spacing
    x = self.offset_x + i * self.distance
    # Mirror the horizontal position for right-to-left scripts.
    if self.direction == "rtl":
        x = self.width - x
    return TPL_DEP_WORDS.format(text=escape_html(text), tag=tag, x=x, y=y)
def _get_singlekws ( skw_matches , spires = False ) :
"""Get single keywords .
: var skw _ matches : dict of { keyword : [ info , . . . ] }
: keyword spires : bool , to get the spires output
: return : list of formatted keywords""" | output = { }
for single_keyword , info in skw_matches :
output [ single_keyword . output ( spires ) ] = len ( info [ 0 ] )
output = [ { 'keyword' : key , 'number' : value } for key , value in output . iteritems ( ) ]
return sorted ( output , key = lambda x : x [ 'number' ] , reverse = True ) |
def offset ( self , offset ) :
"""Move all the intervals in the list by the given ` ` offset ` ` .
: param offset : the shift to be applied
: type offset : : class : ` ~ aeneas . exacttiming . TimeValue `
: raises TypeError : if ` ` offset ` ` is not an instance of ` ` TimeValue ` `""" | self . log ( u"Applying offset to all fragments..." )
self . log ( [ u" Offset %.3f" , offset ] )
for fragment in self . fragments :
fragment . interval . offset ( offset = offset , allow_negative = False , min_begin_value = self . begin , max_end_value = self . end )
self . log ( u"Applying offset to all fragment... |
def mozjpeg(ext_args):
    """Create the argument list for mozjpeg, run it, and return the format."""
    args = copy.copy(_MOZJPEG_ARGS)
    # Either strip all metadata or preserve it, per settings.
    args += ["-copy", "none"] if Settings.destroy_metadata else ["-copy", "all"]
    args += ['-outfile', ext_args.new_filename, ext_args.old_filename]
    extern.run_ext(args)
    return _JPEG_FORMAT
def _parse_css_color(color):
    '''_parse_css_color(css_color) -> gtk.gdk.Color

    Accepts either an ``rgb(r, g, b)`` triple (0-255 components) or any
    color string gtk can parse.
    '''
    if color.startswith("rgb(") and color.endswith(')'):
        # Scale 8-bit components to gtk's 16-bit range (255 * 257 == 65535).
        red, green, blue = [int(part) * 257 for part in color[4:-1].split(',')]
        return gtk.gdk.Color(red, green, blue)
    return gtk.gdk.color_parse(color)
def _makeStoreOwnerPerson ( self ) :
"""Make a L { Person } representing the owner of the store that this
L { Organizer } is installed in .
@ rtype : L { Person }""" | if self . store is None :
return None
userInfo = self . store . findFirst ( signup . UserInfo )
name = u''
if userInfo is not None :
name = userInfo . realName
account = self . store . findUnique ( LoginAccount , LoginAccount . avatars == self . store , None )
ownerPerson = self . createPerson ( name )
if accou... |
def update ( self , session , arrays = None , frame = None ) :
'''Creates a frame and writes it to disk .
Args :
arrays : a list of np arrays . Use the " custom " option in the client .
frame : a 2D np array . This way the plugin can be used for video of any
kind , not just the visualization that comes with... | new_config = self . _get_config ( )
if self . _enough_time_has_passed ( self . previous_config [ 'FPS' ] ) :
self . visualizer . update ( new_config )
self . last_update_time = time . time ( )
final_image = self . _update_frame ( session , arrays , frame , new_config )
self . _update_recording ( final_i... |
def _reset(self, framer):
    """Reset the state for the given framer.

    Safe to call repeatedly with the same framer: the framer's ``id()``
    is remembered, and the state is only cleared when a different framer
    is passed. After clearing, the framer's ``init_state()`` hook is
    invoked so it can seed its own bookkeeping.
    """
    # Already initialized for this framer; nothing to do.
    if self._framer_id == id(framer):
        return
    # Drop any state left over from a previous framer.
    self._other = {}
    # Let the new framer seed its state, then remember which one it was.
    framer.init_state(self)
    self._framer_id = id(framer)
async def handle_exception ( self , exc : Exception , action : str , request_id ) :
"""Handle any exception that occurs , by sending an appropriate message""" | if isinstance ( exc , APIException ) :
await self . reply ( action = action , errors = self . _format_errors ( exc . detail ) , status = exc . status_code , request_id = request_id )
elif exc == Http404 or isinstance ( exc , Http404 ) :
await self . reply ( action = action , errors = self . _format_errors ( 'No... |
def close_others(self):
    """Close every editor tab except the one under the context menu."""
    kept = self.widget(self.tab_under_menu())
    if not self._try_close_dirty_tabs(exept=kept):
        return
    index = 0
    while self.count() > 1:
        candidate = self.widget(index)
        if candidate != kept:
            self.remove_tab(index)
        else:
            # We just hit the surviving tab; from here on every tab to
            # remove sits after it, so keep deleting at index 1.
            index = 1
def grid_search ( script : str , params : typing . Iterable [ str ] , dry_run : bool = False ) -> None :
"""Build all grid search parameter configurations and optionally run them .
: param script : String of command prefix , e . g . ` ` cxflow train - v - o log ` ` .
: param params : Iterable collection of stri... | commands = _build_grid_search_commands ( script = script , params = params )
if dry_run :
logging . warning ( 'Dry run' )
for command in commands :
logging . info ( command )
else :
for command in commands :
try :
completed_process = subprocess . run ( command )
loggi... |
def single_node_env ( num_gpus = 1 ) :
"""Setup environment variables for Hadoop compatibility and GPU allocation""" | import tensorflow as tf
# ensure expanded CLASSPATH w / o glob characters ( required for Spark 2.1 + JNI )
if 'HADOOP_PREFIX' in os . environ and 'TFOS_CLASSPATH_UPDATED' not in os . environ :
classpath = os . environ [ 'CLASSPATH' ]
hadoop_path = os . path . join ( os . environ [ 'HADOOP_PREFIX' ] , 'bin' , 'h... |
def close(self, reply_code=0, reply_text='', class_id=0, method_id=0):
    '''Close this channel. Routes to channel.close.'''
    # Guard against a double-close: an error raised from close listeners
    # can re-enter here while still inside a single process_frames call,
    # at which point the 'channel' attribute may already be gone.
    if not hasattr(self, 'channel'):
        return
    self.channel.close(reply_code, reply_text, class_id, method_id)
def tocimxml ( self ) :
"""Return the CIM - XML representation of this CIM property ,
as an object of an appropriate subclass of : term : ` Element ` .
The returned CIM - XML representation is a ` PROPERTY ` ,
` PROPERTY . REFERENCE ` , or ` PROPERTY . ARRAY ` element dependent on the
property type , and co... | qualifiers = [ q . tocimxml ( ) for q in self . qualifiers . values ( ) ]
if self . is_array : # pylint : disable = no - else - return
assert self . type != 'reference'
if self . value is None :
value_xml = None
else :
array_xml = [ ]
for v in self . value :
if v is None ... |
def ecg_systole ( ecg , rpeaks , t_waves_ends ) :
"""Returns the localization of systoles and diastoles .
Parameters
ecg : list or ndarray
ECG signal ( preferably filtered ) .
rpeaks : list or ndarray
R peaks localization .
t _ waves _ ends : list or ndarray
T waves localization .
Returns
systole ... | waves = np . array ( [ "" ] * len ( ecg ) )
waves [ rpeaks ] = "R"
waves [ t_waves_ends ] = "T"
systole = [ 0 ]
current = 0
for index , value in enumerate ( waves [ 1 : ] ) :
if waves [ index - 1 ] == "R" :
current = 1
if waves [ index - 1 ] == "T" :
current = 0
systole . append ( current )
... |
def omero_bin(self, command):
    """Run the omero command-line client with an argument list, using the
    previously captured (old) environment.

    Raises if the old environment was never initialised.
    """
    assert isinstance(command, list)
    if not self.old_env:
        raise Exception('Old environment not initialised')
    joined = " ".join(command)
    log.info("Running [old environment]: %s", joined)
    self.run('omero', command, capturestd=True, env=self.old_env)
def has_entities(status):
    """Return True if a Status object has at least one entity.

    Args:
        status: either a tweepy.Status object or a dict returned from
            the Twitter API.
    """
    try:
        # Attribute access works for tweepy.Status objects.
        if sum(len(values) for values in status.entities.values()) > 0:
            return True
    except AttributeError:
        # Plain dicts from the raw API fall back to key access.
        if sum(len(values) for values in status['entities'].values()) > 0:
            return True
    return False
def get_version(dev_version=False):
    """Generate a version string.

    Arguments:
        dev_version: Generate a verbose development version from git commits.

    Examples:
        1.1
        1.1.dev43  # If 'dev_version' was passed.
    """
    if not dev_version:
        # "1!" is a PEP 440 epoch prefix on the MAJOR.MINOR release.
        return "1!%d.%d" % (MAJOR, MINOR)
    version = git_dev_version()
    if not version:
        raise RuntimeError("Could not generate dev version from git.")
    return version
def play(self):
    """Send signal to resume playback at the paused offset."""
    audio = self._response['action']['audio']
    audio['interface'] = 'play'
    audio['sources'] = []
    self._response['shouldEndSession'] = True
    # Fluent API: allow chaining on the response builder.
    return self
def limit(self, maximum):
    """Return this query, limited to a certain number of results.

    Unlike core reporting queries, live queries cannot specify a
    starting point — only the maximum number of results returned.

    ```python
    # first 50
    query.limit(50)
    ```
    """
    self.raw.update({'max_results': maximum})
    self.meta['limit'] = maximum
    return self
def add_triple(self, subj: Union[URIRef, str], pred: Union[URIRef, str], obj: Union[URIRef, Literal, str]) -> None:
    """Add a (subject, predicate, object) triple to the rdflib graph.

    Subject, predicate and object may each be given in any supported
    form; empty objects are skipped as bad practice.

    Args:
        subj: Entity subject
        pred: Entity predicate
        obj: Entity object
    """
    # Skip empty objects rather than recording meaningless triples.
    if obj in [None, "", " "]:
        return
    triple = (
        self.process_subj_or_pred(subj),
        self.process_subj_or_pred(pred),
        self.process_obj(obj),
    )
    self.g.add(triple)
def xmoe2_v1_l4k_compressed_c4():
    """With compressed attention."""
    hparams = xmoe2_v1_l4k()
    # Swap every plain attention layer for its compressed variant.
    layers = []
    for layer in hparams.decoder_layers:
        layers.append("compressed_att" if layer == "att" else layer)
    hparams.decoder_layers = layers
    hparams.compression_factor = 4
    return hparams
def get_broks(self, broker_name):
    """Send an HTTP request to the satellite (GET /_broks) and fetch broks.

    The received payload is un-serialized before being returned.

    :param broker_name: the concerned broker link
    :type broker_name: BrokerLink
    :return: Broks list on success, [] on failure
    :rtype: list
    """
    params = {'broker_name': broker_name}
    res = self.con.get('_broks', params, wait=False)
    logger.debug("Got broks from %s: %s", self.name, res)
    return unserialize(res, True)
async def close_wallet_search ( wallet_search_handle : int ) -> None :
"""Close wallet search ( make search handle invalid )
: param wallet _ search _ handle : wallet wallet handle ( created by open _ wallet _ search )
: return : None""" | logger = logging . getLogger ( __name__ )
logger . debug ( "close_wallet_search: >>> wallet_search_handle: %r" , wallet_search_handle )
if not hasattr ( close_wallet_search , "cb" ) :
logger . debug ( "close_wallet_search: Creating callback" )
close_wallet_search . cb = create_cb ( CFUNCTYPE ( None , c_int32 , ... |
def most_similar ( self , word , number = 5 ) :
"""Run a similarity query , retrieving number
most similar words .""" | if self . word_vectors is None :
raise Exception ( 'Model must be fit before querying' )
if self . dictionary is None :
raise Exception ( 'No word dictionary supplied' )
try :
word_idx = self . dictionary [ word ]
except KeyError :
raise Exception ( 'Word not in dictionary' )
return self . _similarity_q... |
def audiorate(filename):
    """Determine the samplerate of the given audio recording file.

    :param filename: filename of the audiofile
    :type filename: str
    :returns: int -- samplerate of the recording
    :raises IOError: if the filename has an unsupported extension
    """
    # Match on the actual extension rather than a substring, so a name
    # like "take.wav.call" is not misread as a WAV file.
    name = filename.lower()
    if name.endswith('.wav'):
        # Context manager guarantees the handle is closed even if
        # reading the header raises.
        with wave.open(filename) as wf:
            return wf.getframerate()
    if name.endswith('.call'):
        # .call recordings use a fixed, known sample rate.
        return 333333
    raise IOError("Unsupported audio format for file: {}".format(filename))
def _createBitpattern ( functioncode , value ) :
"""Create the bit pattern that is used for writing single bits .
This is basically a storage of numerical constants .
Args :
* functioncode ( int ) : can be 5 or 15
* value ( int ) : can be 0 or 1
Returns :
The bit pattern ( string ) .
Raises :
TypeEr... | _checkFunctioncode ( functioncode , [ 5 , 15 ] )
_checkInt ( value , minvalue = 0 , maxvalue = 1 , description = 'inputvalue' )
if functioncode == 5 :
if value == 0 :
return '\x00\x00'
else :
return '\xff\x00'
elif functioncode == 15 :
if value == 0 :
return '\x00'
else :
... |
def flush(self, hard=False):
    """Drop existing entries from the cache.

    Args:
        hard (bool): If True, all current entries are flushed from the
            server(s), which affects all users. If False, only the local
            process is affected.
    """
    if not self.servers:
        return
    if hard:
        # Server-side flush: wipes entries for every client.
        self.client.flush_all()
        self.reset_stats()
        return
    # Soft flush: switch this process to a fresh namespace tag so the
    # old entries simply become unreachable locally.
    from uuid import uuid4
    tag = uuid4().hex
    if self.debug:
        tag = "flushed" + tag
    self.current = tag
def split_arg_to_name_type_value ( self , args_list ) :
"""Split argument text to name , type , value .""" | for arg in args_list :
arg_type = None
arg_value = None
has_type = False
has_value = False
pos_colon = arg . find ( ':' )
pos_equal = arg . find ( '=' )
if pos_equal > - 1 :
has_value = True
if pos_colon > - 1 :
if not has_value :
has_type = True
elif ... |
def n_orifices_per_row ( self ) :
"""Calculate number of orifices at each level given an orifice
diameter .""" | # H is distance from the bottom of the next row of orifices to the
# center of the current row of orifices
H = self . b_rows - 0.5 * self . orifice_diameter
flow_per_orifice = pc . flow_orifice_vert ( self . orifice_diameter , H , con . VC_ORIFICE_RATIO )
n = np . zeros ( self . n_rows )
for i in range ( self . n_rows ... |
def parts ( self ) :
"""Return an array of batch parts to submit""" | parts = [ ]
upserts = dict ( )
deletes = [ ]
# we keep track of the batch size as we go ( pretty close approximation ! ) so we can chunk it small enough
# to limit the HTTP posts to under 700KB - server limits to 750KB , so play it safe
max_upload_size = 700000
# loop upserts first - fit the deletes in afterward
# ' { ... |
def _keepVol ( self , vol ) :
"""Mark this volume to be kept in path .""" | if vol is None :
return
if vol in self . extraVolumes :
del self . extraVolumes [ vol ]
return
if vol not in self . paths :
raise Exception ( "%s not in %s" % ( vol , self ) )
paths = [ os . path . basename ( path ) for path in self . paths [ vol ] ]
newPath = self . selectReceivePath ( paths )
if self ... |
def get_sls_opts ( opts , ** kwargs ) :
'''Return a copy of the opts for use , optionally load a local config on top''' | opts = copy . deepcopy ( opts )
if 'localconfig' in kwargs :
return salt . config . minion_config ( kwargs [ 'localconfig' ] , defaults = opts )
if 'saltenv' in kwargs :
saltenv = kwargs [ 'saltenv' ]
if saltenv is not None :
if not isinstance ( saltenv , six . string_types ) :
saltenv =... |
def _create_latent_variables(self):
    """Create the model's latent variables.

    Returns:
        None (changes model attributes)
    """
    for index in range(self.z_no):
        # One exp-transformed scale parameter per regressor, with a
        # Normal(0, 3) prior and a start value of -5.
        name = 'Scale ' + self.X_names[index]
        self.latent_variables.add_z(name, fam.Flat(transform='exp'), fam.Normal(0, 3))
        self.latent_variables.z_list[index].start = -5.0
    # Re-derive the count from the list actually built.
    self.z_no = len(self.latent_variables.z_list)
def _get_by_id(collection, id):
    '''Return the single item in ``collection`` whose ``id`` field matches.

    Raises ValueError when no item or more than one item matches.
    '''
    matches = [candidate for candidate in collection if candidate.id == id]
    if len(matches) == 1:
        return matches[0]
    if not matches:
        raise ValueError('Could not find a matching item')
    raise ValueError('The id matched {0} items, not 1'.format(len(matches)))
def getlang_by_native_name ( native_name ) :
"""Try to lookup a Language object by native _ name , e . g . ' English ' , in internal language list .
Returns None if lookup by language name fails in resources / languagelookup . json .""" | direct_match = _iget ( native_name , _LANGUAGE_NATIVE_NAME_LOOKUP )
if direct_match :
return direct_match
else :
simple_native_name = native_name . split ( ',' ) [ 0 ]
# take part before comma
simple_native_name = simple_native_name . split ( '(' ) [ 0 ] . strip ( )
# and before any bracket
retu... |
End of preview. Expand in Data Studio
README.md exists but content is empty.
- Downloads last month: 15